8231349: Move intrinsic stubs generation to compiler runtime initialization code
Reviewed-by: redestad, vlivanov
parent f37674a8f7
commit 3859faf183
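The commit replaces the integer "phase" argument (0, 1, 2) threaded through every platform's StubGenerator with a named StubCodeGenerator::StubsKind, and splits the old catch-all final pass in two, so that C2/JVMCI intrinsic stubs can be generated during compiler runtime initialization (optionally on a compiler thread) rather than during early VM startup. A rough mapping, as an editorial sketch rather than a quote from the sources:

// Old phase      New kind            Generator method (renamed)
// phase == 0  -> Initial_stubs       generate_initial  -> generate_initial_stubs
// phase == 1  -> Continuation_stubs  generate_phase1   -> generate_continuation_stubs
// (new)       -> Compiler_stubs      split out of generate_all: generate_compiler_stubs
// phase == 2  -> Final_stubs         generate_all      -> generate_final_stubs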
@@ -33,9 +33,11 @@
 // (see globals.hpp)

 define_pd_global(bool, ImplicitNullChecks,       true);  // Generate code for implicit null checks
 define_pd_global(bool, TrapBasedNullChecks,      false);
 define_pd_global(bool, UncommonNullCast,         true);  // Uncommon-trap NULLs passed to check cast

+define_pd_global(bool, DelayCompilerStubsGeneration, COMPILER2_OR_JVMCI);
+
 define_pd_global(uintx, CodeCacheSegmentSize,    64 COMPILER1_AND_COMPILER2_PRESENT(+64)); // Tiered compilation has large code-entry alignment.
 define_pd_global(intx, CodeEntryAlignment,       64);
 define_pd_global(intx, OptoLoopAlignment,        16);
@@ -570,7 +570,7 @@ void SharedRuntime::gen_i2c_adapter(MacroAssembler *masm,
   // caller, but with an uncorrected stack, causing delayed havoc.

   if (VerifyAdapterCalls &&
-      (Interpreter::code() != NULL || StubRoutines::code1() != NULL)) {
+      (Interpreter::code() != NULL || StubRoutines::final_stubs_code() != NULL)) {
 #if 0
     // So, let's test for cascading c2i/i2c adapters right now.
     // assert(Interpreter::contains($return_addr) ||

@@ -578,18 +578,23 @@ void SharedRuntime::gen_i2c_adapter(MacroAssembler *masm,
     //         "i2c adapter must return to an interpreter frame");
     __ block_comment("verify_i2c { ");
     Label L_ok;
-    if (Interpreter::code() != NULL)
+    if (Interpreter::code() != NULL) {
       range_check(masm, rax, r11,
                   Interpreter::code()->code_start(), Interpreter::code()->code_end(),
                   L_ok);
-    if (StubRoutines::code1() != NULL)
+    }
+    if (StubRoutines::initial_stubs_code() != NULL) {
       range_check(masm, rax, r11,
-                  StubRoutines::code1()->code_begin(), StubRoutines::code1()->code_end(),
+                  StubRoutines::initial_stubs_code()->code_begin(),
+                  StubRoutines::initial_stubs_code()->code_end(),
                   L_ok);
-    if (StubRoutines::code2() != NULL)
+    }
+    if (StubRoutines::final_stubs_code() != NULL) {
       range_check(masm, rax, r11,
-                  StubRoutines::code2()->code_begin(), StubRoutines::code2()->code_end(),
+                  StubRoutines::final_stubs_code()->code_begin(),
+                  StubRoutines::final_stubs_code()->code_end(),
                   L_ok);
+    }
     const char* msg = "i2c adapter must return to an interpreter frame";
     __ block_comment(msg);
     __ stop(msg);
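The hunk above replaces the anonymous StubRoutines::code1()/code2() buffer accessors with descriptive names. The accessor declarations themselves are not among the hunks shown; a sketch of their assumed shape in stubRoutines.hpp (field and type names are assumptions, not quotes from the diff):

class StubRoutines: AllStatic {
  static BufferBlob* _initial_stubs_code;   // previously _code1
  static BufferBlob* _final_stubs_code;     // previously _code2
 public:
  static BufferBlob* initial_stubs_code() { return _initial_stubs_code; }
  static BufferBlob* final_stubs_code()   { return _final_stubs_code; }
};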
@@ -7997,7 +7997,7 @@ class StubGenerator: public StubCodeGenerator {

   // Initialization
-  void generate_initial() {
+  void generate_initial_stubs() {
     // Generate initial stubs and initialize the entry points

     // entry points that exist in all platforms Note: This is code

@@ -8023,6 +8023,12 @@ class StubGenerator: public StubCodeGenerator {
     generate_throw_exception("delayed StackOverflowError throw_exception",
                              CAST_FROM_FN_PTR(address,
                                               SharedRuntime::throw_delayed_StackOverflowError));

+    // Initialize table for copy memory (arraycopy) check.
+    if (UnsafeCopyMemory::_table == nullptr) {
+      UnsafeCopyMemory::create_table(8);
+    }
+
     if (UseCRC32Intrinsics) {
       // set table address before stub generation which uses it
       StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;

@@ -8047,7 +8053,7 @@ class StubGenerator: public StubCodeGenerator {
     }
   }

-  void generate_phase1() {
+  void generate_continuation_stubs() {
     // Continuation stubs:
     StubRoutines::_cont_thaw          = generate_cont_thaw();
     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();

@@ -8057,7 +8063,7 @@ class StubGenerator: public StubCodeGenerator {
     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
   }

-  void generate_all() {
+  void generate_final_stubs() {
     // support for verify_oop (must happen after universe_init)
     if (VerifyOops) {
       StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
@@ -8080,32 +8086,47 @@ class StubGenerator: public StubCodeGenerator {
                                                 SharedRuntime::
                                                 throw_NullPointerException_at_call));

-    if (UseSVE == 0) {
-      StubRoutines::aarch64::_vector_iota_indices = generate_iota_indices("iota_indices");
-    }
-
     // arraycopy stubs used by compilers
     generate_arraycopy_stubs();

-    // countPositives stub for large arrays.
-    StubRoutines::aarch64::_count_positives = generate_count_positives(StubRoutines::aarch64::_count_positives_long);
+    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
+    if (bs_nm != NULL) {
+      StubRoutines::aarch64::_method_entry_barrier = generate_method_entry_barrier();
+    }
+
+    StubRoutines::aarch64::_spin_wait = generate_spin_wait();
+
+#if defined (LINUX) && !defined (__ARM_FEATURE_ATOMICS)
+
+    generate_atomic_entry_points();
+
+#endif // LINUX
+
+    StubRoutines::aarch64::set_completed(); // Indicate that arraycopy and zero_blocks stubs are generated
+  }
+
+  void generate_compiler_stubs() {
+#if COMPILER2_OR_JVMCI
+
+    if (UseSVE == 0) {
+      StubRoutines::aarch64::_vector_iota_indices = generate_iota_indices("iota_indices");
+    }

     // array equals stub for large arrays.
     if (!UseSimpleArrayEquals) {
       StubRoutines::aarch64::_large_array_equals = generate_large_array_equals();
     }

+    // byte_array_inflate stub for large arrays.
+    StubRoutines::aarch64::_large_byte_array_inflate = generate_large_byte_array_inflate();
+
+    // countPositives stub for large arrays.
+    StubRoutines::aarch64::_count_positives = generate_count_positives(StubRoutines::aarch64::_count_positives_long);
+
     generate_compare_long_strings();

     generate_string_indexof_stubs();

-    // byte_array_inflate stub for large arrays.
-    StubRoutines::aarch64::_large_byte_array_inflate = generate_large_byte_array_inflate();
-
-    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
-    if (bs_nm != NULL) {
-      StubRoutines::aarch64::_method_entry_barrier = generate_method_entry_barrier();
-    }
 #ifdef COMPILER2
     if (UseMultiplyToLenIntrinsic) {
       StubRoutines::_multiplyToLen = generate_multiplyToLen();
@@ -8192,36 +8213,33 @@ class StubGenerator: public StubCodeGenerator {
     if (UseAdler32Intrinsics) {
       StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
     }
-
-    StubRoutines::aarch64::_spin_wait = generate_spin_wait();
-
-#if defined (LINUX) && !defined (__ARM_FEATURE_ATOMICS)
-
-    generate_atomic_entry_points();
-
-#endif // LINUX
-
-    StubRoutines::aarch64::set_completed();
+#endif // COMPILER2_OR_JVMCI
   }

 public:
-  StubGenerator(CodeBuffer* code, int phase) : StubCodeGenerator(code) {
-    if (phase == 0) {
-      generate_initial();
-    } else if (phase == 1) {
-      generate_phase1(); // stubs that must be available for the interpreter
-    } else {
-      generate_all();
-    }
+  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+    switch(kind) {
+    case Initial_stubs:
+      generate_initial_stubs();
+      break;
+    case Continuation_stubs:
+      generate_continuation_stubs();
+      break;
+    case Compiler_stubs:
+      generate_compiler_stubs();
+      break;
+    case Final_stubs:
+      generate_final_stubs();
+      break;
+    default:
+      fatal("unexpected stubs kind: %d", kind);
+      break;
+    };
   }
 }; // end class declaration

-#define UCM_TABLE_MAX_ENTRIES 8
-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  if (UnsafeCopyMemory::_table == NULL) {
-    UnsafeCopyMemory::create_table(UCM_TABLE_MAX_ENTRIES);
-  }
-  StubGenerator g(code, phase);
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }
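The StubsKind values the new constructors switch on come from the shared StubCodeGenerator; its declaration is not among the hunks shown here. An assumed sketch (comments paraphrase the startup phases, they are not quoted from the sources):

class StubCodeGenerator: public StackObj {
 public:
  enum StubsKind {
    Initial_stubs,       // generated before universe_init(); needed by the interpreter
    Continuation_stubs,  // generated after continuations_init()
    Compiler_stubs,      // C2/JVMCI intrinsic stubs; generation may be delayed
    Final_stubs          // the remaining shared stubs, late in startup
  };
};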
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, Red Hat Inc. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
@@ -35,8 +35,11 @@ static bool returns_to_call_stub(address return_pc) {
 }

 enum platform_dependent_constants {
-  code_size1 = 19000,          // simply increase if too small (assembler will crash if too small)
-  code_size2 = 45000           // simply increase if too small (assembler will crash if too small)
+  // simply increase sizes if too small (assembler will crash if too small)
+  _initial_stubs_code_size      = 10000,
+  _continuation_stubs_code_size =  2000,
+  _compiler_stubs_code_size     = 30000,
+  _final_stubs_code_size        = 20000
 };

 class aarch64 {
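Each of the four size constants now budgets its own code buffer, so an undersized value can only break that one batch. A sketch of the assumed consumption; BufferBlob and CodeBuffer are real HotSpot types, but the exact wiring shown is illustrative, not quoted from the commit:

BufferBlob* stubs_blob = BufferBlob::create("compiler_stubs",
                                            StubRoutines::_compiler_stubs_code_size);
CodeBuffer  stubs_code(stubs_blob);                          // wrap the blob for emission
StubGenerator_generate(&stubs_code, StubCodeGenerator::Compiler_stubs);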
@@ -34,6 +34,8 @@ define_pd_global(bool, ImplicitNullChecks, true);  // Generate code for implicit null checks
 define_pd_global(bool, UncommonNullCast, true);  // Uncommon-trap nulls passed to check cast
 define_pd_global(bool, TrapBasedNullChecks, false); // Not needed

+define_pd_global(bool, DelayCompilerStubsGeneration, false); // Not needed - only a few compiler stubs
+
 define_pd_global(uintx, CodeCacheSegmentSize, 64 COMPILER1_AND_COMPILER2_PRESENT(+64)); // Tiered compilation has large code-entry alignment.
 define_pd_global(intx, CodeEntryAlignment, 16);
 define_pd_global(intx, OptoLoopAlignment, 16);
@@ -3077,7 +3077,7 @@ class StubGenerator: public StubCodeGenerator {
   //---------------------------------------------------------------------------
   // Initialization

-  void generate_initial() {
+  void generate_initial_stubs() {
     // Generates all stubs and initializes the entry points

     //------------------------------------------------------------------------------------------------------------------------

@@ -3094,6 +3094,10 @@ class StubGenerator: public StubCodeGenerator {
     // stub for throwing stack overflow error used both by interpreter and compiler
     StubRoutines::_throw_StackOverflowError_entry = generate_throw_exception("StackOverflowError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));

+    if (UnsafeCopyMemory::_table == nullptr) {
+      UnsafeCopyMemory::create_table(32);
+    }
+
     // integer division used both by interpreter and compiler
     StubRoutines::Arm::_idiv_irem_entry = generate_idiv_irem();

@@ -3106,7 +3110,7 @@ class StubGenerator: public StubCodeGenerator {

   }

-  void generate_phase1() {
+  void generate_continuation_stubs() {
     // Continuation stubs:
     StubRoutines::_cont_thaw          = generate_cont_thaw();
     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();

@@ -3116,14 +3120,9 @@ class StubGenerator: public StubCodeGenerator {
     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
   }

-  void generate_all() {
+  void generate_final_stubs() {
     // Generates all stubs and initializes the entry points

-#ifdef COMPILER2
-    // Generate partial_subtype_check first here since its code depends on
-    // UseZeroBaseCompressedOops which is defined after heap initialization.
-    StubRoutines::Arm::_partial_subtype_check = generate_partial_subtype_check();
-#endif
     // These entry points require SharedInfo::stack0 to be set up in non-core builds
     // and need to be relocatable, so they each fabricate a RuntimeStub internally.
     StubRoutines::_throw_AbstractMethodError_entry = generate_throw_exception("AbstractMethodError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));

@@ -3144,6 +3143,14 @@ class StubGenerator: public StubCodeGenerator {
       StubRoutines::Arm::_method_entry_barrier = generate_method_entry_barrier();
     }
+  }

+  void generate_compiler_stubs() {
+#ifdef COMPILER2
+    // Generate partial_subtype_check first here since its code depends on
+    // UseZeroBaseCompressedOops which is defined after heap initialization.
+    StubRoutines::Arm::_partial_subtype_check = generate_partial_subtype_check();
+
 #ifdef COMPILE_CRYPTO
     // generate AES intrinsics code
     if (UseAESIntrinsics) {

@@ -3154,25 +3161,31 @@ class StubGenerator: public StubCodeGenerator {
       StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt();
     }
 #endif // COMPILE_CRYPTO
+#endif // COMPILER2
   }

 public:
-  StubGenerator(CodeBuffer* code, int phase) : StubCodeGenerator(code) {
-    if (phase == 0) {
-      generate_initial();
-    } else if (phase == 1) {
-      generate_phase1();
-    } else {
-      generate_all();
-    }
+  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+    switch(kind) {
+    case Initial_stubs:
+      generate_initial_stubs();
+      break;
+    case Continuation_stubs:
+      generate_continuation_stubs();
+      break;
+    case Compiler_stubs:
+      generate_compiler_stubs();
+      break;
+    case Final_stubs:
+      generate_final_stubs();
+      break;
+    default:
+      fatal("unexpected stubs kind: %d", kind);
+      break;
+    };
   }
 }; // end class declaration

-#define UCM_TABLE_MAX_ENTRIES 32
-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  if (UnsafeCopyMemory::_table == nullptr) {
-    UnsafeCopyMemory::create_table(UCM_TABLE_MAX_ENTRIES);
-  }
-  StubGenerator g(code, phase);
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2023, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -30,8 +30,11 @@
 // extend it.

 enum platform_dependent_constants {
-  code_size1 = 9000,           // simply increase if too small (assembler will crash if too small)
-  code_size2 = 22000           // simply increase if too small (assembler will crash if too small)
+  // simply increase sizes if too small (assembler will crash if too small)
+  _initial_stubs_code_size      =  9000,
+  _continuation_stubs_code_size =  2000,
+  _compiler_stubs_code_size     = 22000,
+  _final_stubs_code_size        = 22000
 };

 class Arm {
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2002, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2002, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2012, 2020 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -36,6 +36,8 @@ define_pd_global(bool, ImplicitNullChecks, true);  // Generate code for implicit null checks
 define_pd_global(bool, TrapBasedNullChecks, true);
 define_pd_global(bool, UncommonNullCast, true);  // Uncommon-trap NULLs passed to check cast.

+define_pd_global(bool, DelayCompilerStubsGeneration, COMPILER2_OR_JVMCI);
+
 #define DEFAULT_STACK_YELLOW_PAGES (2)
 #define DEFAULT_STACK_RED_PAGES (1)
 // Java_java_net_SocketOutputStream_socketWrite0() uses a 64k buffer on the
@@ -4650,7 +4650,7 @@ class StubGenerator: public StubCodeGenerator {

   // Initialization
-  void generate_initial() {
+  void generate_initial_stubs() {
     // Generates all stubs and initializes the entry points

     // Entry points that exist in all platforms.

@@ -4663,6 +4663,10 @@ class StubGenerator: public StubCodeGenerator {
     StubRoutines::_call_stub_entry       = generate_call_stub(StubRoutines::_call_stub_return_address);
     StubRoutines::_catch_exception_entry = generate_catch_exception();

+    if (UnsafeCopyMemory::_table == NULL) {
+      UnsafeCopyMemory::create_table(8);
+    }
+
     // Build this early so it's available for the interpreter.
     StubRoutines::_throw_StackOverflowError_entry =
       generate_throw_exception("StackOverflowError throw_exception",

@@ -4684,7 +4688,7 @@ class StubGenerator: public StubCodeGenerator {
     }
   }

-  void generate_phase1() {
+  void generate_continuation_stubs() {
     // Continuation stubs:
     StubRoutines::_cont_thaw          = generate_cont_thaw();
     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();

@@ -4694,7 +4698,7 @@ class StubGenerator: public StubCodeGenerator {
     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
   }

-  void generate_all() {
+  void generate_final_stubs() {
     // Generates all stubs and initializes the entry points

     // These entry points require SharedInfo::stack0 to be set up in

@@ -4715,6 +4719,10 @@ class StubGenerator: public StubCodeGenerator {

     // arraycopy stubs used by compilers
     generate_arraycopy_stubs();
+  }
+
+  void generate_compiler_stubs() {
+#if COMPILER2_OR_JVMCI

 #ifdef COMPILER2
     if (UseMultiplyToLenIntrinsic) {

@@ -4763,24 +4771,31 @@ class StubGenerator: public StubCodeGenerator {
       StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
     }
 #endif
+#endif // COMPILER2_OR_JVMCI
   }

 public:
-  StubGenerator(CodeBuffer* code, int phase) : StubCodeGenerator(code) {
-    if (phase == 0) {
-      generate_initial();
-    } else if (phase == 1) {
-      generate_phase1(); // stubs that must be available for the interpreter
-    } else {
-      generate_all();
-    }
+  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+    switch(kind) {
+    case Initial_stubs:
+      generate_initial_stubs();
+      break;
+    case Continuation_stubs:
+      generate_continuation_stubs();
+      break;
+    case Compiler_stubs:
+      generate_compiler_stubs();
+      break;
+    case Final_stubs:
+      generate_final_stubs();
+      break;
+    default:
+      fatal("unexpected stubs kind: %d", kind);
+      break;
+    };
   }
 };

-#define UCM_TABLE_MAX_ENTRIES 8
-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  if (UnsafeCopyMemory::_table == NULL) {
-    UnsafeCopyMemory::create_table(UCM_TABLE_MAX_ENTRIES);
-  }
-  StubGenerator g(code, phase);
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2002, 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2002, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2012, 2019 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -33,8 +33,11 @@
 static bool returns_to_call_stub(address return_pc) { return return_pc == _call_stub_return_address; }

 enum platform_dependent_constants {
-  code_size1 = 20000,          // simply increase if too small (assembler will crash if too small)
-  code_size2 = 24000           // simply increase if too small (assembler will crash if too small)
+  // simply increase sizes if too small (assembler will crash if too small)
+  _initial_stubs_code_size      = 20000,
+  _continuation_stubs_code_size =  2000,
+  _compiler_stubs_code_size     = 24000,
+  _final_stubs_code_size        = 24000
 };

 // CRC32 Intrinsics.
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2000, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2000, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2020, 2023, Huawei Technologies Co., Ltd. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -36,6 +36,8 @@ define_pd_global(bool, ImplicitNullChecks, true);  // Generate code for implicit null checks
 define_pd_global(bool, TrapBasedNullChecks, false);
 define_pd_global(bool, UncommonNullCast, true);  // Uncommon-trap NULLs passed to check cast

+define_pd_global(bool, DelayCompilerStubsGeneration, COMPILER2_OR_JVMCI);
+
 define_pd_global(uintx, CodeCacheSegmentSize, 64 COMPILER1_AND_COMPILER2_PRESENT(+64)); // Tiered compilation has large code-entry alignment.
 define_pd_global(intx, CodeEntryAlignment, 64);
 define_pd_global(intx, OptoLoopAlignment, 16);
@@ -4003,7 +4003,7 @@ class StubGenerator: public StubCodeGenerator {
 #undef __

   // Initialization
-  void generate_initial() {
+  void generate_initial_stubs() {
     // Generate initial stubs and initialize the entry points

     // entry points that exist in all platforms Note: This is code

@@ -4014,6 +4014,10 @@ class StubGenerator: public StubCodeGenerator {

     StubRoutines::_forward_exception_entry = generate_forward_exception();

+    if (UnsafeCopyMemory::_table == NULL) {
+      UnsafeCopyMemory::create_table(8);
+    }
+
     StubRoutines::_call_stub_entry =
       generate_call_stub(StubRoutines::_call_stub_return_address);

@@ -4031,7 +4035,7 @@ class StubGenerator: public StubCodeGenerator {
                                                  SharedRuntime::throw_delayed_StackOverflowError));
   }

-  void generate_phase1() {
+  void generate_continuation_stubs() {
     // Continuation stubs:
     StubRoutines::_cont_thaw          = generate_cont_thaw();
     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();

@@ -4041,7 +4045,7 @@ class StubGenerator: public StubCodeGenerator {
     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
   }

-  void generate_all() {
+  void generate_final_stubs() {
     // support for verify_oop (must happen after universe_init)
     if (VerifyOops) {
       StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();

@@ -4067,6 +4071,16 @@ class StubGenerator: public StubCodeGenerator {
     // arraycopy stubs used by compilers
     generate_arraycopy_stubs();

+    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
+    if (bs_nm != NULL) {
+      StubRoutines::riscv::_method_entry_barrier = generate_method_entry_barrier();
+    }
+
+    StubRoutines::riscv::set_completed();
+  }
+
+  void generate_compiler_stubs() {
+#if COMPILER2_OR_JVMCI
 #ifdef COMPILER2
     if (UseMulAddIntrinsic) {
       StubRoutines::_mulAdd = generate_mulAdd();

@@ -4096,37 +4110,36 @@ class StubGenerator: public StubCodeGenerator {
       StubRoutines::_bigIntegerLeftShiftWorker = generate_bigIntegerLeftShift();
       StubRoutines::_bigIntegerRightShiftWorker = generate_bigIntegerRightShift();
     }
-#endif
+#endif // COMPILER2

     generate_compare_long_strings();

     generate_string_indexof_stubs();

-    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
-    if (bs_nm != NULL) {
-      StubRoutines::riscv::_method_entry_barrier = generate_method_entry_barrier();
-    }
-
-    StubRoutines::riscv::set_completed();
+#endif // COMPILER2_OR_JVMCI
   }

 public:
-  StubGenerator(CodeBuffer* code, int phase) : StubCodeGenerator(code) {
-    if (phase == 0) {
-      generate_initial();
-    } else if (phase == 1) {
-      generate_phase1(); // stubs that must be available for the interpreter
-    } else {
-      generate_all();
-    }
+  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+    switch(kind) {
+    case Initial_stubs:
+      generate_initial_stubs();
+      break;
+    case Continuation_stubs:
+      generate_continuation_stubs();
+      break;
+    case Compiler_stubs:
+      generate_compiler_stubs();
+      break;
+    case Final_stubs:
+      generate_final_stubs();
+      break;
+    default:
+      fatal("unexpected stubs kind: %d", kind);
+      break;
+    };
   }
 }; // end class declaration

-#define UCM_TABLE_MAX_ENTRIES 8
-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  if (UnsafeCopyMemory::_table == NULL) {
-    UnsafeCopyMemory::create_table(UCM_TABLE_MAX_ENTRIES);
-  }
-
-  StubGenerator g(code, phase);
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, Red Hat Inc. All rights reserved.
  * Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.

@@ -36,8 +36,11 @@ static bool returns_to_call_stub(address return_pc) {
 }

 enum platform_dependent_constants {
-  code_size1 = 19000,          // simply increase if too small (assembler will crash if too small)
-  code_size2 = 28000           // simply increase if too small (assembler will crash if too small)
+  // simply increase sizes if too small (assembler will crash if too small)
+  _initial_stubs_code_size      = 19000,
+  _continuation_stubs_code_size =  2000,
+  _compiler_stubs_code_size     = 28000,
+  _final_stubs_code_size        = 28000
 };

 class riscv {
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2016, 2018 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -36,6 +36,8 @@ define_pd_global(bool, ImplicitNullChecks, true);  // Generate code for implicit null checks
 define_pd_global(bool, TrapBasedNullChecks, true);
 define_pd_global(bool, UncommonNullCast, true);  // Uncommon-trap NULLs passed to check cast.

+define_pd_global(bool, DelayCompilerStubsGeneration, COMPILER2_OR_JVMCI);
+
 define_pd_global(uintx, CodeCacheSegmentSize, 256);
 // This shall be at least 32 for proper branch target alignment.
 // Ideally, this is 256 (cache line size). This keeps code end data
@@ -3087,7 +3087,7 @@ class StubGenerator: public StubCodeGenerator {
   }
 #endif // INCLUDE_JFR

-  void generate_initial() {
+  void generate_initial_stubs() {
     // Generates all stubs and initializes the entry points.

     // Entry points that exist in all platforms.

@@ -3125,7 +3125,7 @@ class StubGenerator: public StubCodeGenerator {
       StubRoutines::zarch::_trot_table_addr = (address)StubRoutines::zarch::_trot_table;
   }

-  void generate_phase1() {
+  void generate_continuation_stubs() {
     if (!Continuations::enabled()) return;

     // Continuation stubs:

@@ -3137,7 +3137,7 @@ class StubGenerator: public StubCodeGenerator {
     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
   }

-  void generate_all() {
+  void generate_final_stubs() {
     // Generates all stubs and initializes the entry points.

     StubRoutines::zarch::_partial_subtype_check = generate_partial_subtype_check();

@@ -3152,7 +3152,10 @@ class StubGenerator: public StubCodeGenerator {

     // Arraycopy stubs used by compilers.
     generate_arraycopy_stubs();
+  }

+  void generate_compiler_stubs() {
+#if COMPILER2_OR_JVMCI
     // Generate AES intrinsics code.
     if (UseAESIntrinsics) {
       if (VM_Version::has_Crypto_AES()) {

@@ -3215,18 +3218,28 @@ class StubGenerator: public StubCodeGenerator {
         = CAST_FROM_FN_PTR(address, SharedRuntime::montgomery_square);
     }
 #endif
+#endif // COMPILER2_OR_JVMCI
   }

 public:
-  StubGenerator(CodeBuffer* code, int phase) : StubCodeGenerator(code) {
-    _stub_count = (phase == 0) ? 0x100 : 0x200;
-    if (phase == 0) {
-      generate_initial();
-    } else if (phase == 1) {
-      generate_phase1(); // stubs that must be available for the interpreter
-    } else {
-      generate_all();
-    }
+  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+    switch(kind) {
+    case Initial_stubs:
+      generate_initial_stubs();
+      break;
+    case Continuation_stubs:
+      generate_continuation_stubs();
+      break;
+    case Compiler_stubs:
+      generate_compiler_stubs();
+      break;
+    case Final_stubs:
+      generate_final_stubs();
+      break;
+    default:
+      fatal("unexpected stubs kind: %d", kind);
+      break;
+    };
   }

 private:

@@ -3263,6 +3276,6 @@ class StubGenerator: public StubCodeGenerator {

 };

-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  StubGenerator g(code, phase);
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2016, 2017 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -32,9 +32,11 @@
 static bool returns_to_call_stub(address return_pc) { return return_pc == _call_stub_return_address; }

 enum { // Platform dependent constants.
   // TODO: May be able to shrink this a lot
-  code_size1 = 20000,      // Simply increase if too small (assembler will crash if too small).
-  code_size2 = 20000       // Simply increase if too small (assembler will crash if too small).
+  // simply increase sizes if too small (assembler will crash if too small)
+  _initial_stubs_code_size      = 20000,
+  _continuation_stubs_code_size =  2000,
+  _compiler_stubs_code_size     = 20000,
+  _final_stubs_code_size        = 20000
 };

 // MethodHandles adapters
@@ -35,6 +35,8 @@ define_pd_global(bool, ImplicitNullChecks, true);  // Generate code for implicit null checks
 define_pd_global(bool, TrapBasedNullChecks, false); // Not needed on x86.
 define_pd_global(bool, UncommonNullCast, true);  // Uncommon-trap nulls passed to check cast

+define_pd_global(bool, DelayCompilerStubsGeneration, COMPILER2_OR_JVMCI);
+
 define_pd_global(uintx, CodeCacheSegmentSize, 64 COMPILER1_AND_COMPILER2_PRESENT(+64)); // Tiered compilation has large code-entry alignment.
 // See 4827828 for this change. There is no globals_core_i486.hpp. I can't
 // assign a different value for C2 without touching a number of files. Use
@@ -745,25 +745,30 @@ void SharedRuntime::gen_i2c_adapter(MacroAssembler *masm,
     __ movptr(rax, Address(rsp, 0));

     if (VerifyAdapterCalls &&
-        (Interpreter::code() != nullptr || StubRoutines::code1() != nullptr)) {
+        (Interpreter::code() != nullptr || StubRoutines::final_stubs_code() != nullptr)) {
       // So, let's test for cascading c2i/i2c adapters right now.
       // assert(Interpreter::contains($return_addr) ||
       //        StubRoutines::contains($return_addr),
       //        "i2c adapter must return to an interpreter frame");
       __ block_comment("verify_i2c { ");
       Label L_ok;
-      if (Interpreter::code() != nullptr)
+      if (Interpreter::code() != nullptr) {
         range_check(masm, rax, rdi,
                     Interpreter::code()->code_start(), Interpreter::code()->code_end(),
                     L_ok);
-      if (StubRoutines::code1() != nullptr)
+      }
+      if (StubRoutines::initial_stubs_code() != nullptr) {
         range_check(masm, rax, rdi,
-                    StubRoutines::code1()->code_begin(), StubRoutines::code1()->code_end(),
+                    StubRoutines::initial_stubs_code()->code_begin(),
+                    StubRoutines::initial_stubs_code()->code_end(),
                     L_ok);
-      if (StubRoutines::code2() != nullptr)
+      }
+      if (StubRoutines::final_stubs_code() != nullptr) {
         range_check(masm, rax, rdi,
-                    StubRoutines::code2()->code_begin(), StubRoutines::code2()->code_end(),
+                    StubRoutines::final_stubs_code()->code_begin(),
+                    StubRoutines::final_stubs_code()->code_end(),
                     L_ok);
+      }
       const char* msg = "i2c adapter must return to an interpreter frame";
       __ block_comment(msg);
       __ stop(msg);
@@ -796,7 +796,7 @@ void SharedRuntime::gen_i2c_adapter(MacroAssembler *masm,
   // caller, but with an uncorrected stack, causing delayed havoc.

   if (VerifyAdapterCalls &&
-      (Interpreter::code() != nullptr || StubRoutines::code1() != nullptr)) {
+      (Interpreter::code() != nullptr || StubRoutines::final_stubs_code() != nullptr)) {
     // So, let's test for cascading c2i/i2c adapters right now.
     // assert(Interpreter::contains($return_addr) ||
     //        StubRoutines::contains($return_addr),

@@ -805,18 +805,24 @@ void SharedRuntime::gen_i2c_adapter(MacroAssembler *masm,
     // Pick up the return address
     __ movptr(rax, Address(rsp, 0));
     Label L_ok;
-    if (Interpreter::code() != nullptr)
+    if (Interpreter::code() != nullptr) {
       range_check(masm, rax, r11,
-                  Interpreter::code()->code_start(), Interpreter::code()->code_end(),
+                  Interpreter::code()->code_start(),
+                  Interpreter::code()->code_end(),
                   L_ok);
-    if (StubRoutines::code1() != nullptr)
+    }
+    if (StubRoutines::initial_stubs_code() != nullptr) {
       range_check(masm, rax, r11,
-                  StubRoutines::code1()->code_begin(), StubRoutines::code1()->code_end(),
+                  StubRoutines::initial_stubs_code()->code_begin(),
+                  StubRoutines::initial_stubs_code()->code_end(),
                   L_ok);
-    if (StubRoutines::code2() != nullptr)
+    }
+    if (StubRoutines::final_stubs_code() != nullptr) {
       range_check(masm, rax, r11,
-                  StubRoutines::code2()->code_begin(), StubRoutines::code2()->code_end(),
+                  StubRoutines::final_stubs_code()->code_begin(),
+                  StubRoutines::final_stubs_code()->code_end(),
                   L_ok);
+    }
     const char* msg = "i2c adapter must return to an interpreter frame";
     __ block_comment(msg);
     __ stop(msg);
@@ -4062,7 +4062,7 @@ class StubGenerator: public StubCodeGenerator {
   //---------------------------------------------------------------------------
   // Initialization

-  void generate_initial() {
+  void generate_initial_stubs() {
     // Generates all stubs and initializes the entry points

     //------------------------------------------------------------------------------------------------------------------------

@@ -4079,6 +4079,11 @@ class StubGenerator: public StubCodeGenerator {
     // platform dependent
     create_control_words();

+    // Initialize table for copy memory (arraycopy) check.
+    if (UnsafeCopyMemory::_table == nullptr) {
+      UnsafeCopyMemory::create_table(16);
+    }
+
     StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
     StubRoutines::x86::_verify_fpu_cntrl_wrd_entry = generate_verify_fpu_cntrl_wrd();
     StubRoutines::x86::_d2i_wrapper = generate_d2i_wrapper(T_INT, CAST_FROM_FN_PTR(address, SharedRuntime::d2i));

@@ -4137,7 +4142,7 @@ class StubGenerator: public StubCodeGenerator {
     }
   }

-  void generate_phase1() {
+  void generate_continuation_stubs() {
     // Continuation stubs:
     StubRoutines::_cont_thaw          = generate_cont_thaw();
     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();

@@ -4147,7 +4152,7 @@ class StubGenerator: public StubCodeGenerator {
     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
   }

-  void generate_all() {
+  void generate_final_stubs() {
     // Generates all stubs and initializes the entry points

     // These entry points require SharedInfo::stack0 to be set up in non-core builds

@@ -4156,8 +4161,22 @@ class StubGenerator: public StubCodeGenerator {
     StubRoutines::_throw_IncompatibleClassChangeError_entry= generate_throw_exception("IncompatibleClassChangeError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
     StubRoutines::_throw_NullPointerException_at_call_entry= generate_throw_exception("NullPointerException at call throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));

-    //------------------------------------------------------------------------------------------------------------------------
-    // entry points that are platform specific
+    // support for verify_oop (must happen after universe_init)
+    StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
+
+    // arraycopy stubs used by compilers
+    generate_arraycopy_stubs();
+
+    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
+    if (bs_nm != nullptr) {
+      StubRoutines::x86::_method_entry_barrier = generate_method_entry_barrier();
+    }
+  }
+
+  void generate_compiler_stubs() {
+#if COMPILER2_OR_JVMCI
+
+    // entry points that are C2/JVMCI specific

     StubRoutines::x86::_vector_float_sign_mask = generate_vector_mask("vector_float_sign_mask", 0x7FFFFFFF);
     StubRoutines::x86::_vector_float_sign_flip = generate_vector_mask("vector_float_sign_flip", 0x80000000);

@@ -4190,12 +4209,6 @@ class StubGenerator: public StubCodeGenerator {
       StubRoutines::x86::_vector_popcount_lut = generate_popcount_avx_lut("popcount_lut");
     }

-    // support for verify_oop (must happen after universe_init)
-    StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
-
-    // arraycopy stubs used by compilers
-    generate_arraycopy_stubs();
-
     // don't bother generating these AES intrinsic stubs unless global flag is set
     if (UseAESIntrinsics) {
       StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();

@@ -4229,30 +4242,32 @@ class StubGenerator: public StubCodeGenerator {
     if (UseGHASHIntrinsics) {
       StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
     }
-
-    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
-    if (bs_nm != nullptr) {
-      StubRoutines::x86::_method_entry_barrier = generate_method_entry_barrier();
-    }
+#endif // COMPILER2_OR_JVMCI
   }

 public:
-  StubGenerator(CodeBuffer* code, int phase) : StubCodeGenerator(code) {
-    if (phase == 0) {
-      generate_initial();
-    } else if (phase == 1) {
-      generate_phase1(); // stubs that must be available for the interpreter
-    } else {
-      generate_all();
-    }
+  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+    switch(kind) {
+    case Initial_stubs:
+      generate_initial_stubs();
+      break;
+    case Continuation_stubs:
+      generate_continuation_stubs();
+      break;
+    case Compiler_stubs:
+      generate_compiler_stubs();
+      break;
+    case Final_stubs:
+      generate_final_stubs();
+      break;
+    default:
+      fatal("unexpected stubs kind: %d", kind);
+      break;
+    };
   }
 }; // end class declaration

-#define UCM_TABLE_MAX_ENTRIES 16
-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  if (UnsafeCopyMemory::_table == nullptr) {
-    UnsafeCopyMemory::create_table(UCM_TABLE_MAX_ENTRIES);
-  }
-  StubGenerator g(code, phase);
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }
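Each platform now exposes the same StubGenerator_generate(CodeBuffer*, StubsKind) entry point. The shared driver that calls it lives in stubRoutines.cpp and is not part of the hunks shown; a hypothetical sketch of what each batch's initialization presumably looks like (helper name and error handling are assumptions):

static BufferBlob* initialize_stubs(StubCodeGenerator::StubsKind kind,
                                    int size, const char* name) {
  // Allocate one code buffer per batch, sized by the per-batch constant.
  BufferBlob* blob = BufferBlob::create(name, size);
  if (blob == nullptr) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for %s", name);
  }
  CodeBuffer buffer(blob);
  StubGenerator_generate(&buffer, kind);  // platform-specific generation
  return blob;
}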
@@ -3862,12 +3862,17 @@ void StubGenerator::create_control_words() {
 }

 // Initialization
-void StubGenerator::generate_initial() {
+void StubGenerator::generate_initial_stubs() {
   // Generates all stubs and initializes the entry points

   // These platform-specific settings are needed by generate_call_stub()
   create_control_words();

+  // Initialize table for unsafe copy memory check.
+  if (UnsafeCopyMemory::_table == nullptr) {
+    UnsafeCopyMemory::create_table(16);
+  }
+
   // entry points that exist in all platforms Note: This is code
   // that could be shared among different platforms - however the
   // benefit seems to be smaller than the disadvantage of having a

@@ -3917,10 +3922,6 @@ void StubGenerator::generate_initial() {
     StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
   }

-  if (UsePoly1305Intrinsics) {
-    StubRoutines::_poly1305_processBlocks = generate_poly1305_processBlocks();
-  }
-
   if (UseCRC32CIntrinsics) {
     bool supports_clmul = VM_Version::supports_clmul();
     StubRoutines::x86::generate_CRC32C_table(supports_clmul);

@@ -3928,10 +3929,6 @@ void StubGenerator::generate_initial() {
     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
   }

-  if (UseAdler32Intrinsics) {
-    StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
-  }
-
   if (VM_Version::supports_float16()) {
     // For results consistency both intrinsics should be enabled.
     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.

@@ -3945,7 +3942,7 @@ void StubGenerator::generate_initial() {
   generate_libm_stubs();
 }

-void StubGenerator::generate_phase1() {
+void StubGenerator::generate_continuation_stubs() {
   // Continuation stubs:
   StubRoutines::_cont_thaw          = generate_cont_thaw();
   StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();

@@ -3955,8 +3952,8 @@ void StubGenerator::generate_phase1() {
   JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
 }

-void StubGenerator::generate_all() {
-  // Generates all stubs and initializes the entry points
+void StubGenerator::generate_final_stubs() {
+  // Generates the rest of the stubs and initializes the entry points

   // These entry points require SharedInfo::stack0 to be set up in
   // non-core builds and need to be relocatable, so they each
@@ -3979,7 +3976,33 @@ void StubGenerator::generate_all() {
                                                SharedRuntime::
                                                throw_NullPointerException_at_call));

-  // entry points that are platform specific
+  // support for verify_oop (must happen after universe_init)
+  if (VerifyOops) {
+    StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
+  }
+
+  // data cache line writeback
+  StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
+  StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
+
+  // arraycopy stubs used by compilers
+  generate_arraycopy_stubs();
+
+  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
+  if (bs_nm != nullptr) {
+    StubRoutines::x86::_method_entry_barrier = generate_method_entry_barrier();
+  }
+
+  if (UseVectorizedMismatchIntrinsic) {
+    StubRoutines::_vectorizedMismatch = generate_vectorizedMismatch();
+  }
+}
+
+void StubGenerator::generate_compiler_stubs() {
+#if COMPILER2_OR_JVMCI
+
+  // Entry points that are C2 compiler specific.

   StubRoutines::x86::_vector_float_sign_mask = generate_vector_mask("vector_float_sign_mask", 0x7FFFFFFF7FFFFFFF);
   StubRoutines::x86::_vector_float_sign_flip = generate_vector_mask("vector_float_sign_flip", 0x8000000080000000);
   StubRoutines::x86::_vector_double_sign_mask = generate_vector_mask("vector_double_sign_mask", 0x7FFFFFFFFFFFFFFF);

@@ -4011,34 +4034,32 @@ void StubGenerator::generate_all() {
     StubRoutines::x86::_vector_popcount_lut = generate_popcount_avx_lut("popcount_lut");
   }

-  // support for verify_oop (must happen after universe_init)
-  if (VerifyOops) {
-    StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
-  }
-
-  // data cache line writeback
-  StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
-  StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
-
-  // arraycopy stubs used by compilers
-  generate_arraycopy_stubs();
-
   generate_aes_stubs();

   generate_ghash_stubs();

   generate_chacha_stubs();

+  if (UseAdler32Intrinsics) {
+    StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
+  }
+
+  if (UsePoly1305Intrinsics) {
+    StubRoutines::_poly1305_processBlocks = generate_poly1305_processBlocks();
+  }
+
   if (UseMD5Intrinsics) {
     StubRoutines::_md5_implCompress = generate_md5_implCompress(false, "md5_implCompress");
     StubRoutines::_md5_implCompressMB = generate_md5_implCompress(true, "md5_implCompressMB");
   }

   if (UseSHA1Intrinsics) {
     StubRoutines::x86::_upper_word_mask_addr = generate_upper_word_mask();
     StubRoutines::x86::_shuffle_byte_flip_mask_addr = generate_shuffle_byte_flip_mask();
     StubRoutines::_sha1_implCompress = generate_sha1_implCompress(false, "sha1_implCompress");
     StubRoutines::_sha1_implCompressMB = generate_sha1_implCompress(true, "sha1_implCompressMB");
   }

   if (UseSHA256Intrinsics) {
     StubRoutines::x86::_k256_adr = (address)StubRoutines::x86::_k256;
     char* dst = (char*)StubRoutines::x86::_k256_W;
@@ -4052,6 +4073,7 @@ void StubGenerator::generate_all() {
     StubRoutines::_sha256_implCompress = generate_sha256_implCompress(false, "sha256_implCompress");
     StubRoutines::_sha256_implCompressMB = generate_sha256_implCompress(true, "sha256_implCompressMB");
   }
+
   if (UseSHA512Intrinsics) {
     StubRoutines::x86::_k512_W_addr = (address)StubRoutines::x86::_k512_W;
     StubRoutines::x86::_pshuffle_byte_flip_mask_addr_sha512 = generate_pshuffle_byte_flip_mask_sha512();

@@ -4084,10 +4106,6 @@ void StubGenerator::generate_all() {
     StubRoutines::_base64_decodeBlock = generate_base64_decodeBlock();
   }

-  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
-  if (bs_nm != nullptr) {
-    StubRoutines::x86::_method_entry_barrier = generate_method_entry_barrier();
-  }
 #ifdef COMPILER2
   if (UseMultiplyToLenIntrinsic) {
     StubRoutines::_multiplyToLen = generate_multiplyToLen();

@@ -4175,17 +4193,32 @@ void StubGenerator::generate_all() {
     }
   }
 #endif // COMPILER2
-
-  if (UseVectorizedMismatchIntrinsic) {
-    StubRoutines::_vectorizedMismatch = generate_vectorizedMismatch();
-  }
+#endif // COMPILER2_OR_JVMCI
 }

-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  if (UnsafeCopyMemory::_table == nullptr) {
-    UnsafeCopyMemory::create_table(16);
-  }
-  StubGenerator g(code, phase);
+StubGenerator::StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+  DEBUG_ONLY( _regs_in_thread = false; )
+  switch(kind) {
+  case Initial_stubs:
+    generate_initial_stubs();
+    break;
+  case Continuation_stubs:
+    generate_continuation_stubs();
+    break;
+  case Compiler_stubs:
+    generate_compiler_stubs();
+    break;
+  case Final_stubs:
+    generate_final_stubs();
+    break;
+  default:
+    fatal("unexpected stubs kind: %d", kind);
+    break;
+  };
+}
+
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }

 #undef __
@@ -550,21 +550,13 @@ class StubGenerator: public StubCodeGenerator {
   void create_control_words();

   // Initialization
-  void generate_initial();
-  void generate_phase1();
-  void generate_all();
+  void generate_initial_stubs();
+  void generate_continuation_stubs();
+  void generate_compiler_stubs();
+  void generate_final_stubs();

 public:
-  StubGenerator(CodeBuffer* code, int phase) : StubCodeGenerator(code) {
-    DEBUG_ONLY( _regs_in_thread = false; )
-    if (phase == 0) {
-      generate_initial();
-    } else if (phase == 1) {
-      generate_phase1(); // stubs that must be available for the interpreter
-    } else {
-      generate_all();
-    }
-  }
+  StubGenerator(CodeBuffer* code, StubsKind kind);
 };

 #endif // CPU_X86_STUBGENERATOR_X86_64_HPP
@@ -32,8 +32,13 @@
 static bool returns_to_call_stub(address return_pc) { return return_pc == _call_stub_return_address; }

 enum platform_dependent_constants {
-  code_size1 = 20000 LP64_ONLY(+10000),                     // simply increase if too small (assembler will crash if too small)
-  code_size2 = 35300 LP64_ONLY(+45000) WINDOWS_ONLY(+2048)  // simply increase if too small (assembler will crash if too small)
+  // simply increase sizes if too small (assembler will crash if too small)
+  _initial_stubs_code_size      = 20000 WINDOWS_ONLY(+1000),
+  _continuation_stubs_code_size =  1000 LP64_ONLY(+1000),
+  // AVX512 intrinsics add more code in a 64-bit VM;
+  // Windows has more code to save/restore registers
+  _compiler_stubs_code_size     = 20000 LP64_ONLY(+30000) WINDOWS_ONLY(+2000),
+  _final_stubs_code_size        = 10000 LP64_ONLY(+20000) WINDOWS_ONLY(+2000)
 };

 class x86 {
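A worked example of how those conditional macros combine (illustrative only): LP64_ONLY(x) expands to x on 64-bit builds and to nothing on 32-bit ones, and WINDOWS_ONLY(x) behaves analogously for Windows. So on a 64-bit Windows build the compiler-stubs budget is:

// 64-bit Windows: both macros expand, 32-bit Linux: neither does (20000).
static_assert(20000 + 30000 + 2000 == 52000,
              "effective _compiler_stubs_code_size on 64-bit Windows");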
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2000, 2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2000, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright 2007, 2008, 2009, 2010, 2011 Red Hat, Inc.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -36,6 +36,8 @@ define_pd_global(bool, ImplicitNullChecks, true);
 define_pd_global(bool, TrapBasedNullChecks, false);
 define_pd_global(bool, UncommonNullCast, true);

+define_pd_global(bool, DelayCompilerStubsGeneration, false); // Zero has no compiler stubs
+
 define_pd_global(uintx, CodeCacheSegmentSize, 64 COMPILER1_AND_COMPILER2_PRESENT(+64)); // Tiered compilation has large code-entry alignment.
 define_pd_global(intx, CodeEntryAlignment, 32);
 define_pd_global(intx, OptoLoopAlignment, 16);
@@ -176,7 +176,7 @@ class StubGenerator: public StubCodeGenerator {
       StubRoutines::_oop_arraycopy;
   }

-  void generate_initial() {
+  void generate_initial_stubs() {
     // Generates all stubs and initializes the entry points

     // entry points that exist in all platforms Note: This is code

@@ -197,7 +197,7 @@ class StubGenerator: public StubCodeGenerator {
     StubRoutines::_fence_entry = ShouldNotCallThisStub();
   }

-  void generate_all() {
+  void generate_final_stubs() {
     // Generates all stubs and initializes the entry points

     // These entry points require SharedInfo::stack0 to be set up in

@@ -222,17 +222,17 @@ class StubGenerator: public StubCodeGenerator {
   }

 public:
-  StubGenerator(CodeBuffer* code, bool all) : StubCodeGenerator(code) {
-    if (all) {
-      generate_all();
-    } else {
-      generate_initial();
-    }
+  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
+    if (kind == Initial_stubs) {
+      generate_initial_stubs();
+    } else if (kind == Final_stubs) {
+      generate_final_stubs();
+    }
   }
 };

-void StubGenerator_generate(CodeBuffer* code, int phase) {
-  StubGenerator g(code, phase);
+void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind) {
+  StubGenerator g(code, kind);
 }

 EntryFrame *EntryFrame::build(const intptr_t* parameters,
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright 2007, 2008, 2009, 2010 Red Hat, Inc.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -40,9 +40,13 @@
 }

 enum platform_dependent_constants {
-  code_size1 = 0,   // The assembler will fail with a guarantee
-  code_size2 = 0    // if these are too small.  Simply increase
-};                  // them if that happens.
+  // The assembler will fail with a guarantee if these are too small.
+  // Simply increase them if that happens.
+  _initial_stubs_code_size      = 0,
+  _continuation_stubs_code_size = 0,
+  _compiler_stubs_code_size     = 0,
+  _final_stubs_code_size        = 0
+};

 enum method_handles_platform_dependent_constants {
   method_handles_adapters_code_size = 0
@@ -554,7 +554,7 @@ void os::verify_stack_alignment() {
 #ifdef AMD64
   // current_stack_pointer() calls the generated get_previous_sp stub routine.
   // Only enable the assert after the routine becomes available.
-  if (StubRoutines::code1() != nullptr) {
+  if (StubRoutines::initial_stubs_code() != nullptr) {
     assert(((intptr_t)os::current_stack_pointer() & (StackAlignmentInBytes-1)) == 0, "incorrect stack alignment");
   }
 #endif
@@ -54,13 +54,15 @@ JVMCICompiler* JVMCICompiler::instance(bool require_non_null, TRAPS) {
   return _instance;
 }

+void compiler_stubs_init(bool in_compiler_thread);
+
 // Initialization
 void JVMCICompiler::initialize() {
   assert(!CompilerConfig::is_c1_or_interpreter_only_no_jvmci(), "JVMCI is launched, it's not c1/interpreter only mode");
   if (!UseCompiler || !EnableJVMCI || !UseJVMCICompiler || !should_perform_init()) {
     return;
   }
+
+  compiler_stubs_init(true /* in_compiler_thread */); // generate compiler's intrinsics stubs
+
   set_state(initialized);
 }
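Only the forward declaration of compiler_stubs_init() appears in this diff; its definition lives in shared runtime code. A sketch of the assumed dispatch, where everything except the flag and the hook name is hypothetical:

void compiler_stubs_init(bool in_compiler_thread) {
  // Generate C2/JVMCI intrinsic stubs exactly once: either here, during
  // compiler runtime initialization (delayed mode), or eagerly during
  // early VM startup (in_compiler_thread == false).
  if (in_compiler_thread && DelayCompilerStubsGeneration) {
    StubRoutines::initialize_compiler_stubs();   // hypothetical helper name
  } else if (!in_compiler_thread && !DelayCompilerStubsGeneration) {
    StubRoutines::initialize_compiler_stubs();   // hypothetical helper name
  }
}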
@@ -179,6 +179,7 @@ class outputStream;
   LOG_TAG(streaming) \
   LOG_TAG(stringdedup) \
   LOG_TAG(stringtable) \
+  LOG_TAG(stubs) \
   LOG_TAG(subclass) \
   LOG_TAG(survivor) \
   LOG_TAG(suspend) \
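The new 'stubs' tag lets the generation code report each batch through Unified Logging, enabled with -Xlog:stubs on the java command line. The exact messages are not shown in this diff; a sketch of the kind of call the tag enables (log_info is the real UL macro, the message is illustrative):

log_info(stubs)("%s generated in %d bytes", "Compiler stubs", code_size);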
@ -55,6 +55,9 @@ const char* C2Compiler::retry_no_iterative_escape_analysis() {
const char* C2Compiler::retry_class_loading_during_parsing() {
  return "retry class loading during parsing";
}

void compiler_stubs_init(bool in_compiler_thread);

bool C2Compiler::init_c2_runtime() {

  // Check assumptions used while running ADLC
@ -74,6 +77,8 @@ bool C2Compiler::init_c2_runtime() {

  DEBUG_ONLY( Node::init_NodeProperty(); )

  compiler_stubs_init(true /* in_compiler_thread */); // generate compiler's intrinsics stubs

  Compile::pd_compiler2_init();

  CompilerThread* thread = CompilerThread::current();

@ -7281,7 +7281,7 @@ bool LibraryCallKit::inline_digestBase_implCompress(vmIntrinsics::ID id) {
  }
  if (state == nullptr) return false;

  assert(stubAddr != nullptr, "Stub is generated");
  assert(stubAddr != nullptr, "Stub %s is not generated", stubName);
  if (stubAddr == nullptr) return false;

  // Call the stub.
@ -357,7 +357,7 @@ const int ObjectAlignmentInBytes = 8;
  product(bool, UseVectorizedMismatchIntrinsic, false, DIAGNOSTIC, \
          "Enables intrinsification of ArraysSupport.vectorizedMismatch()") \
                                                                            \
  product(bool, UseVectorizedHashCodeIntrinsic, false, DIAGNOSTIC, \
  product(bool, UseVectorizedHashCodeIntrinsic, false, DIAGNOSTIC, \
          "Enables intrinsification of ArraysSupport.vectorizedHashCode()") \
                                                                            \
  product(bool, UseCopySignIntrinsic, false, DIAGNOSTIC, \
@ -366,6 +366,9 @@ const int ObjectAlignmentInBytes = 8;
  product(bool, UseSignumIntrinsic, false, DIAGNOSTIC, \
          "Enables intrinsification of Math.signum") \
                                                     \
  product_pd(bool, DelayCompilerStubsGeneration, DIAGNOSTIC, \
             "Use Compiler thread for compiler's stubs generation") \
                                                                    \
  product(ccstrlist, DisableIntrinsic, "", DIAGNOSTIC, \
          "do not expand intrinsics whose (internal) names appear here") \
          constraint(DisableIntrinsicConstraintFunc,AfterErgo) \
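Since DelayCompilerStubsGeneration is a diagnostic flag, toggling it by hand requires unlocking diagnostic options first. A hypothetical invocation:

  java -XX:+UnlockDiagnosticVMOptions -XX:+DelayCompilerStubsGeneration -version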

@ -1,5 +1,5 @@
/*
 * Copyright (c) 1997, 2022, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
@ -66,16 +66,18 @@ void classLoader_init1();
void compilationPolicy_init();
void codeCache_init();
void VM_Version_init();
void stubRoutines_init1();
void stubRoutines_initContinuationStubs();
jint universe_init();           // depends on codeCache_init and stubRoutines_init
void initial_stubs_init();

jint universe_init();           // depends on codeCache_init and initial_stubs_init
// depends on universe_init, must be before interpreter_init (currently only on SPARC)
void gc_barrier_stubs_init();
void interpreter_init_stub();   // before any methods loaded
void interpreter_init_code();   // after methods loaded, but before they are linked
void continuations_init();      // depends on flags (UseCompressedOops) and barrier sets
void continuation_stubs_init(); // depends on continuations_init
void interpreter_init_stub();   // before any methods loaded
void interpreter_init_code();   // after methods loaded, but before they are linked
void accessFlags_init();
void InterfaceSupport_init();
void universe2_init();  // dependent on codeCache_init and stubRoutines_init, loads primordial classes
void universe2_init();  // dependent on codeCache_init and initial_stubs_init, loads primordial classes
void referenceProcessor_init();
void jni_handles_init();
void vmStructs_init() NOT_DEBUG_RETURN;
@ -89,10 +91,9 @@ void dependencies_init();

// Initialization after compiler initialization
bool universe_post_init();  // must happen after compiler_init
void javaClasses_init();    // must happen after vtable initialization
void stubRoutines_init2();  // note: StubRoutines need 2-phase init

void continuations_init();  // depends on flags (UseCompressedOops) and barrier sets
void javaClasses_init();    // must happen after vtable initialization
void compiler_stubs_init(bool in_compiler_thread);  // compiler's StubRoutines stubs
void final_stubs_init();    // final StubRoutines stubs

// Do not disable thread-local-storage, as it is important for some
// JNI/JVM/JVMTI functions and signal handlers to work properly
@ -119,9 +120,9 @@ jint init_globals() {
  compilationPolicy_init();
  codeCache_init();
  VM_Version_init();              // depends on codeCache_init for emitting code
  stubRoutines_init1();
  initial_stubs_init();
  jint status = universe_init();  // dependent on codeCache_init and
                                  // stubRoutines_init1 and metaspace_init.
                                  // initial_stubs_init and metaspace_init.
  if (status != JNI_OK)
    return status;
@ -134,17 +135,17 @@ jint init_globals() {
#endif // LEAK_SANITIZER

  AsyncLogWriter::initialize();
  gc_barrier_stubs_init();   // depends on universe_init, must be before interpreter_init
  continuations_init();      // must precede continuation stub generation
  stubRoutines_initContinuationStubs(); // depends on continuations_init
  interpreter_init_stub();   // before methods get loaded
  gc_barrier_stubs_init();   // depends on universe_init, must be before interpreter_init
  continuations_init();      // must precede continuation stub generation
  continuation_stubs_init(); // depends on continuations_init
  interpreter_init_stub();   // before methods get loaded
  accessFlags_init();
  InterfaceSupport_init();
  VMRegImpl::set_regName();  // need this before generate_stubs (for printing oop maps).
  VMRegImpl::set_regName();  // need this before generate_stubs (for printing oop maps).
  SharedRuntime::generate_stubs();
  universe2_init();          // dependent on codeCache_init and stubRoutines_init1
  javaClasses_init();        // must happen after vtable initialization, before referenceProcessor_init
  interpreter_init_code();   // after javaClasses_init and before any method gets linked
  universe2_init();          // dependent on codeCache_init and initial_stubs_init
  javaClasses_init();        // must happen after vtable initialization, before referenceProcessor_init
  interpreter_init_code();   // after javaClasses_init and before any method gets linked
  referenceProcessor_init();
  jni_handles_init();
#if INCLUDE_VM_STRUCTS
@ -169,7 +170,8 @@ jint init_globals() {
  if (!universe_post_init()) {
    return JNI_ERR;
  }
  stubRoutines_init2();      // note: StubRoutines need 2-phase init
  compiler_stubs_init(false /* in_compiler_thread */);  // compiler's intrinsics stubs
  final_stubs_init();        // final StubRoutines stubs
  MethodHandles::generate_adapters();

  // All the flags that get adjusted by VM_Version_init and os::init_2
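Condensing the stub-related calls above, the resulting startup order is (an illustrative summary, not the full body of init_globals()):

  initial_stubs_init();                                 // before universe_init()
  continuation_stubs_init();                            // after continuations_init()
  SharedRuntime::generate_stubs();
  compiler_stubs_init(false /* in_compiler_thread */);  // no-op if DelayCompilerStubsGeneration is set
  final_stubs_init();                                   // the remaining stubs, at the end of VM init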

@ -2883,11 +2883,11 @@ AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_ada
                                                           BasicType* sig_bt,
                                                           bool allocate_code_blob) {

  // StubRoutines::code2() is initialized after this function can be called. As a result,
  // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that generated
  // prior to StubRoutines::code2() being set. Checks refer to checks generated in an I2C
  // stub that ensure that an I2C stub is called from an interpreter frame.
  bool contains_all_checks = StubRoutines::code2() != nullptr;
  // StubRoutines::_final_stubs_code is initialized after this function can be called. As a result,
  // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated
  // prior to StubRoutines::_final_stubs_code being set. The checks are runtime range checks,
  // generated in an I2C stub, which ensure that the I2C stub is called from an interpreter
  // frame or from stub code.
  bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;

  VMRegPair stack_regs[16];
  VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
@ -51,6 +51,11 @@ void StubCodeDesc::freeze() {
  _frozen = true;
}

void StubCodeDesc::unfreeze() {
  assert(_frozen, "repeated unfreeze operation");
  _frozen = false;
}

void StubCodeDesc::print_on(outputStream* st) const {
  st->print("%s", group());
  st->print("::");
@ -83,6 +88,13 @@ void StubCodeGenerator::stub_prolog(StubCodeDesc* cdesc) {
}

void StubCodeGenerator::stub_epilog(StubCodeDesc* cdesc) {
  LogTarget(Debug, stubs) lt;
  if (lt.is_enabled()) {
    LogStream ls(lt);
    cdesc->print_on(&ls);
    ls.cr();
  }

  if (_print_code) {
#ifndef PRODUCT
    // Find the assembly code remarks in the outer CodeBuffer.
@ -83,6 +83,7 @@ class StubCodeDesc: public CHeapObj<mtCode> {
  };

  static void freeze();
  static void unfreeze();

  const char* group() const { return _group; }
  const char* name() const  { return _name; }
@ -113,6 +114,21 @@ class StubCodeGenerator: public StackObj {

  virtual void stub_prolog(StubCodeDesc* cdesc); // called by StubCodeMark constructor
  virtual void stub_epilog(StubCodeDesc* cdesc); // called by StubCodeMark destructor

  enum StubsKind {
    Initial_stubs,      // Stubs used by Runtime, Interpreter and compiled code.
                        // Have to be generated very early during VM startup.

    Continuation_stubs, // Stubs used by virtual threads.
                        // Generated after GC barriers initialization but before
                        // Interpreter initialization.

    Compiler_stubs,     // Intrinsics and other stubs used only by compiled code.
                        // Can be generated by a compiler (C2/JVMCI) thread, based
                        // on the DelayCompilerStubsGeneration flag.

    Final_stubs         // The rest of the stubs. Generated at the end of VM init.
  };
};
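The Zero constructor earlier in this patch dispatches only on Initial_stubs and Final_stubs; a port that emits code for all four groups would be expected to handle every kind. An illustrative sketch (generate_continuation_stubs and generate_compiler_stubs are assumed names for the per-kind generators on such a port, not shown in this diff):

  StubGenerator(CodeBuffer* code, StubsKind kind) : StubCodeGenerator(code) {
    switch (kind) {
    case Initial_stubs:      generate_initial_stubs();      break;
    case Continuation_stubs: generate_continuation_stubs(); break; // assumed name
    case Compiler_stubs:     generate_compiler_stubs();     break; // assumed name
    case Final_stubs:        generate_final_stubs();        break;
    default: ShouldNotReachHere();
    }
  }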

@ -50,9 +50,10 @@ address UnsafeCopyMemory::_common_exit_stub_pc = nullptr;

// Class Variables

BufferBlob* StubRoutines::_code1 = nullptr;
BufferBlob* StubRoutines::_code2 = nullptr;
BufferBlob* StubRoutines::_code3 = nullptr;
BufferBlob* StubRoutines::_initial_stubs_code      = nullptr;
BufferBlob* StubRoutines::_final_stubs_code        = nullptr;
BufferBlob* StubRoutines::_compiler_stubs_code     = nullptr;
BufferBlob* StubRoutines::_continuation_stubs_code = nullptr;

address StubRoutines::_call_stub_return_address = nullptr;
address StubRoutines::_call_stub_entry          = nullptr;
@ -187,7 +188,7 @@ JFR_ONLY(address StubRoutines::_jfr_write_checkpoint = nullptr;)
// The first one generates stubs needed during universe init (e.g., _handle_must_compile_first_entry).
// The second phase includes all other stubs (which may depend on the universe being initialized).

extern void StubGenerator_generate(CodeBuffer* code, int phase); // only interface to generators
extern void StubGenerator_generate(CodeBuffer* code, StubCodeGenerator::StubsKind kind); // only interface to generators

void UnsafeCopyMemory::create_table(int max_size) {
  UnsafeCopyMemory::_table = new UnsafeCopyMemory[max_size];
@ -214,22 +215,63 @@ address UnsafeCopyMemory::page_error_continue_pc(address pc) {
  return nullptr;
}

void StubRoutines::initialize1() {
  if (_code1 == nullptr) {
    ResourceMark rm;
    TraceTime timer("StubRoutines generation 1", TRACETIME_LOG(Info, startuptime));
    // Add extra space for large CodeEntryAlignment
    int max_aligned_stubs = 10;
    int size = code_size1 + CodeEntryAlignment * max_aligned_stubs;
    _code1 = BufferBlob::create("StubRoutines (1)", size);
    if (_code1 == nullptr) {
      vm_exit_out_of_memory(code_size1, OOM_MALLOC_ERROR, "CodeCache: no room for StubRoutines (1)");
    }
    CodeBuffer buffer(_code1);
    StubGenerator_generate(&buffer, 0);
    // When new stubs added we need to make sure there is some space left
    // to catch situation when we should increase size again.
    assert(code_size1 == 0 || buffer.insts_remaining() > 200, "increase code_size1");

static BufferBlob* initialize_stubs(StubCodeGenerator::StubsKind kind,
                                    int code_size, int max_aligned_stubs,
                                    const char* timer_msg,
                                    const char* buffer_name,
                                    const char* assert_msg) {
  ResourceMark rm;
  TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
  // Add extra space for large CodeEntryAlignment
  int size = code_size + CodeEntryAlignment * max_aligned_stubs;
  BufferBlob* stubs_code = BufferBlob::create(buffer_name, size);
  if (stubs_code == nullptr) {
    vm_exit_out_of_memory(code_size, OOM_MALLOC_ERROR, "CodeCache: no room for %s", buffer_name);
  }
  CodeBuffer buffer(stubs_code);
  StubGenerator_generate(&buffer, kind);
  // When new stubs are added, we need to make sure there is some space left
  // to catch the situation when we should increase the size again.
  assert(code_size == 0 || buffer.insts_remaining() > 200, "increase %s", assert_msg);

  LogTarget(Info, stubs) lt;
  if (lt.is_enabled()) {
    LogStream ls(lt);
    ls.print_cr("%s\t [" INTPTR_FORMAT ", " INTPTR_FORMAT "] used: %d, free: %d",
                buffer_name, p2i(stubs_code->content_begin()), p2i(stubs_code->content_end()),
                buffer.total_content_size(), buffer.insts_remaining());
  }
  return stubs_code;
}
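With the LOG_TAG(stubs) entry added earlier in this patch, the summary line above is reachable through unified logging. A hypothetical run and output shape (the addresses and sizes below are made up; only the format follows the print_cr() string above):

  java -Xlog:stubs=info -version
  [0.012s][info][stubs] StubRoutines (initial stubs)   [0x00007f0001000000, 0x00007f0001004e00] used: 19968, free: 512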

void StubRoutines::initialize_initial_stubs() {
  if (_initial_stubs_code == nullptr) {
    _initial_stubs_code = initialize_stubs(StubCodeGenerator::Initial_stubs,
                                           _initial_stubs_code_size, 10,
                                           "StubRoutines generation initial stubs",
                                           "StubRoutines (initial stubs)",
                                           "_initial_stubs_code_size");
  }
}

void StubRoutines::initialize_continuation_stubs() {
  if (_continuation_stubs_code == nullptr) {
    _continuation_stubs_code = initialize_stubs(StubCodeGenerator::Continuation_stubs,
                                                _continuation_stubs_code_size, 10,
                                                "StubRoutines generation continuation stubs",
                                                "StubRoutines (continuation stubs)",
                                                "_continuation_stubs_code_size");
  }
}

void StubRoutines::initialize_compiler_stubs() {
  if (_compiler_stubs_code == nullptr) {
    _compiler_stubs_code = initialize_stubs(StubCodeGenerator::Compiler_stubs,
                                            _compiler_stubs_code_size, 100,
                                            "StubRoutines generation compiler stubs",
                                            "StubRoutines (compiler stubs)",
                                            "_compiler_stubs_code_size");
  }
}

@ -270,38 +312,13 @@ static void test_arraycopy_func(address func, int alignment) {
}
#endif // ASSERT

void StubRoutines::initializeContinuationStubs() {
  if (_code3 == nullptr) {
    ResourceMark rm;
    TraceTime timer("StubRoutines generation 3", TRACETIME_LOG(Info, startuptime));
    _code3 = BufferBlob::create("StubRoutines (3)", code_size2);
    if (_code3 == nullptr) {
      vm_exit_out_of_memory(code_size2, OOM_MALLOC_ERROR, "CodeCache: no room for StubRoutines (3)");
    }
    CodeBuffer buffer(_code3);
    StubGenerator_generate(&buffer, 1);
    // When new stubs added we need to make sure there is some space left
    // to catch situation when we should increase size again.
    assert(code_size2 == 0 || buffer.insts_remaining() > 200, "increase code_size3");
  }
}

void StubRoutines::initialize2() {
  if (_code2 == nullptr) {
    ResourceMark rm;
    TraceTime timer("StubRoutines generation 2", TRACETIME_LOG(Info, startuptime));
    // Add extra space for large CodeEntryAlignment
    int max_aligned_stubs = 100;
    int size = code_size2 + CodeEntryAlignment * max_aligned_stubs;
    _code2 = BufferBlob::create("StubRoutines (2)", size);
    if (_code2 == nullptr) {
      vm_exit_out_of_memory(code_size2, OOM_MALLOC_ERROR, "CodeCache: no room for StubRoutines (2)");
    }
    CodeBuffer buffer(_code2);
    StubGenerator_generate(&buffer, 2);
    // When new stubs added we need to make sure there is some space left
    // to catch situation when we should increase size again.
    assert(code_size2 == 0 || buffer.insts_remaining() > 200, "increase code_size2");
void StubRoutines::initialize_final_stubs() {
  if (_final_stubs_code == nullptr) {
    _final_stubs_code = initialize_stubs(StubCodeGenerator::Final_stubs,
                                         _final_stubs_code_size, 10,
                                         "StubRoutines generation final stubs",
                                         "StubRoutines (final stubs)",
                                         "_final_stubs_code_size");
  }

#ifdef ASSERT
@ -387,9 +404,24 @@ void StubRoutines::initialize2() {
}
void stubRoutines_init1() { StubRoutines::initialize1(); }
void stubRoutines_init2() { StubRoutines::initialize2(); }
void stubRoutines_initContinuationStubs() { StubRoutines::initializeContinuationStubs(); }
void initial_stubs_init()      { StubRoutines::initialize_initial_stubs(); }
void continuation_stubs_init() { StubRoutines::initialize_continuation_stubs(); }
void final_stubs_init()        { StubRoutines::initialize_final_stubs(); }

void compiler_stubs_init(bool in_compiler_thread) {
  if (in_compiler_thread && DelayCompilerStubsGeneration) {
    // Temporarily revert the frozen state of stub generation, because
    // this is called during compiler runtime initialization, after
    // final_stubs_init() has already finished.
    // That is fine, because these stubs are used only by compiled code
    // and the compiler is not running yet.
    StubCodeDesc::unfreeze();
    StubRoutines::initialize_compiler_stubs();
    StubCodeDesc::freeze();
  } else if (!in_compiler_thread && !DelayCompilerStubsGeneration) {
    StubRoutines::initialize_compiler_stubs();
  }
}
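The two call sites (init_globals() passing false, compiler runtime initialization passing true) combine with the flag so that the compiler stubs are generated exactly once. A condensed view of the dispatch above:

  // in_compiler_thread | DelayCompilerStubsGeneration | action
  // -------------------+------------------------------+----------------------------------
  // false (VM startup) | false                        | generate now
  // false (VM startup) | true                         | skip (deferred to compiler init)
  // true  (compiler)   | true                         | unfreeze, generate, freeze
  // true  (compiler)   | false                        | skip (already done at startup)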

//
// Default versions of arraycopy functions

@ -148,9 +148,10 @@ class StubRoutines: AllStatic {
  static address _atomic_add_entry;
  static address _fence_entry;

  static BufferBlob* _code1;                   // code buffer for initial routines
  static BufferBlob* _code2;
  static BufferBlob* _code3;                   // code buffer for all other routines
  static BufferBlob* _initial_stubs_code;      // code buffer for initial routines
  static BufferBlob* _continuation_stubs_code; // code buffer for continuation stubs
  static BufferBlob* _compiler_stubs_code;     // code buffer for C2 intrinsics
  static BufferBlob* _final_stubs_code;        // code buffer for all other routines

  // Leaf routines which implement arraycopy and their addresses
  // arraycopy operands aligned on element type boundary
@ -265,21 +266,25 @@ class StubRoutines: AllStatic {

 public:
  // Initialization/Testing
  static void initialize1();                   // must happen before universe::genesis
  static void initialize2();                   // must happen after universe::genesis
  static void initializeContinuationStubs();   // must happen after universe::genesis
  static void initialize_initial_stubs();      // must happen before universe::genesis
  static void initialize_continuation_stubs(); // must happen after universe::genesis
  static void initialize_compiler_stubs();     // must happen after universe::genesis
  static void initialize_final_stubs();        // must happen after universe::genesis

  static bool is_stub_code(address addr) { return contains(addr); }

  static bool contains(address addr) {
    return
      (_code1 != nullptr && _code1->blob_contains(addr)) ||
      (_code2 != nullptr && _code2->blob_contains(addr)) ;
      (_initial_stubs_code      != nullptr && _initial_stubs_code->blob_contains(addr))      ||
      (_continuation_stubs_code != nullptr && _continuation_stubs_code->blob_contains(addr)) ||
      (_compiler_stubs_code     != nullptr && _compiler_stubs_code->blob_contains(addr))     ||
      (_final_stubs_code        != nullptr && _final_stubs_code->blob_contains(addr)) ;
  }

  static RuntimeBlob* code1() { return _code1; }
  static RuntimeBlob* code2() { return _code2; }
  static RuntimeBlob* code3() { return _code3; }
  static RuntimeBlob* initial_stubs_code()      { return _initial_stubs_code; }
  static RuntimeBlob* continuation_stubs_code() { return _continuation_stubs_code; }
  static RuntimeBlob* compiler_stubs_code()     { return _compiler_stubs_code; }
  static RuntimeBlob* final_stubs_code()        { return _final_stubs_code; }

  // Debugging
  static jint verify_oop_count() { return _verify_oop_count; }