8298720: Insufficient error handling when CodeBuffer is exhausted

Reviewed-by: kvn, fyang
This commit is contained in:
Tobias Hartmann 2023-01-06 08:28:09 +00:00
parent b5b7948d9b
commit cc4936a79e
8 changed files with 85 additions and 24 deletions

@@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2021, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -199,9 +199,12 @@ void C1_MacroAssembler::initialize_body(Register obj, Register len_in_bytes, int
mov(rscratch1, len_in_bytes);
lea(t1, Address(obj, hdr_size_in_bytes));
lsr(t2, rscratch1, LogBytesPerWord);
zero_words(t1, t2);
address tpc = zero_words(t1, t2);
bind(done);
if (tpc == nullptr) {
Compilation::current()->bailout("no space for trampoline stub");
}
}
@@ -228,10 +231,17 @@ void C1_MacroAssembler::initialize_object(Register obj, Register klass, Register
if (var_size_in_bytes != noreg) {
mov(index, var_size_in_bytes);
initialize_body(obj, index, hdr_size_in_bytes, t1, t2);
if (Compilation::current()->bailed_out()) {
return;
}
} else if (con_size_in_bytes > hdr_size_in_bytes) {
con_size_in_bytes -= hdr_size_in_bytes;
lea(t1, Address(obj, hdr_size_in_bytes));
zero_words(t1, con_size_in_bytes / BytesPerWord);
address tpc = zero_words(t1, con_size_in_bytes / BytesPerWord);
if (tpc == nullptr) {
Compilation::current()->bailout("no space for trampoline stub");
return;
}
}
}
@@ -267,6 +277,9 @@ void C1_MacroAssembler::allocate_array(Register obj, Register len, Register t1,
// clear rest of allocated space
initialize_body(obj, arr_size, header_size * BytesPerWord, t1, t2);
if (Compilation::current()->bailed_out()) {
return;
}
membar(StoreStore);

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -298,7 +298,12 @@ void C2_MacroAssembler::string_indexof(Register str2, Register str1,
stub = RuntimeAddress(StubRoutines::aarch64::string_indexof_linear_uu());
assert(stub.target() != NULL, "string_indexof_linear_uu stub has not been generated");
}
trampoline_call(stub);
address call = trampoline_call(stub);
if (call == nullptr) {
DEBUG_ONLY(reset_labels(LINEARSEARCH, LINEAR_MEDIUM, DONE, NOMATCH, MATCH));
ciEnv::current()->record_failure("CodeCache is full");
return;
}
b(DONE);
}
@@ -857,7 +862,12 @@ void C2_MacroAssembler::string_compare(Register str1, Register str2,
ShouldNotReachHere();
}
assert(stub.target() != NULL, "compare_long_string stub has not been generated");
trampoline_call(stub);
address call = trampoline_call(stub);
if (call == nullptr) {
DEBUG_ONLY(reset_labels(DONE, SHORT_LOOP, SHORT_STRING, SHORT_LAST, SHORT_LOOP_TAIL, SHORT_LAST2, SHORT_LAST_INIT, SHORT_LOOP_START));
ciEnv::current()->record_failure("CodeCache is full");
return;
}
b(DONE);
bind(SHORT_STRING);

@@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2021, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -1084,9 +1084,6 @@ public:
bool acquire, bool release, bool weak,
Register result);
private:
void compare_eq(Register rn, Register rm, enum operand_size size);
#ifdef ASSERT
// Template short-hand support to clean-up after a failed call to trampoline
// call generation (see trampoline_call() below), when a set of Labels must
@@ -1101,6 +1098,9 @@ private:
}
#endif
private:
void compare_eq(Register rn, Register rm, enum operand_size size);
public:
// AArch64 OpenJDK uses four different types of calls:
// - direct call: bl pc_relative_offset

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2021, Red Hat Inc. All rights reserved.
* Copyright (c) 2021, Azul Systems, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -1103,6 +1103,9 @@ static void gen_continuation_enter(MacroAssembler* masm,
__ cbnz(c_rarg2, call_thaw);
const address tr_call = __ trampoline_call(resolve);
if (tr_call == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
oop_maps->add_gc_map(__ pc() - start, map);
__ post_call_nop();
@@ -1110,7 +1113,10 @@ static void gen_continuation_enter(MacroAssembler* masm,
__ b(exit);
CodeBuffer* cbuf = masm->code_section()->outer();
CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
address stub = CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
if (stub == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
}
// compiled entry
@@ -1127,6 +1133,9 @@ static void gen_continuation_enter(MacroAssembler* masm,
__ cbnz(c_rarg2, call_thaw);
const address tr_call = __ trampoline_call(resolve);
if (tr_call == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
oop_maps->add_gc_map(__ pc() - start, map);
__ post_call_nop();
@@ -1168,7 +1177,10 @@ static void gen_continuation_enter(MacroAssembler* masm,
}
CodeBuffer* cbuf = masm->code_section()->outer();
CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
address stub = CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
if (stub == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
}
static void gen_continuation_yield(MacroAssembler* masm,

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2022, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -2361,7 +2361,10 @@ class StubGenerator: public StubCodeGenerator {
__ cbnz(value, non_block_zeroing);
__ mov(bz_base, to);
__ add(to, to, cnt_words, Assembler::LSL, LogBytesPerWord);
__ zero_words(bz_base, cnt_words);
address tpc = __ zero_words(bz_base, cnt_words);
if (tpc == nullptr) {
fatal("CodeCache is full at generate_fill");
}
__ b(rest);
__ bind(non_block_zeroing);
__ fill_words(to, cnt_words, value);

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -562,7 +562,12 @@ void C2_MacroAssembler::string_indexof(Register haystack, Register needle,
stub = RuntimeAddress(StubRoutines::riscv::string_indexof_linear_uu());
assert(stub.target() != NULL, "string_indexof_linear_uu stub has not been generated");
}
trampoline_call(stub);
address call = trampoline_call(stub);
if (call == nullptr) {
DEBUG_ONLY(reset_labels(LINEARSEARCH, DONE, NOMATCH));
ciEnv::current()->record_failure("CodeCache is full");
return;
}
j(DONE);
bind(NOMATCH);
@@ -965,7 +970,12 @@ void C2_MacroAssembler::string_compare(Register str1, Register str2,
ShouldNotReachHere();
}
assert(stub.target() != NULL, "compare_long_string stub has not been generated");
trampoline_call(stub);
address call = trampoline_call(stub);
if (call == nullptr) {
DEBUG_ONLY(reset_labels(DONE, SHORT_LOOP, SHORT_STRING, SHORT_LAST, SHORT_LOOP_TAIL, SHORT_LAST2, SHORT_LAST_INIT, SHORT_LOOP_START));
ciEnv::current()->record_failure("CodeCache is full");
return;
}
j(DONE);
bind(SHORT_STRING);

@@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -1328,8 +1328,6 @@ public:
jalr(x0, x1, 0);
}
private:
#ifdef ASSERT
// Template short-hand support to clean-up after a failed call to trampoline
// call generation (see trampoline_call() below), when a set of Labels must
@@ -1343,6 +1341,9 @@ private:
lbl.reset();
}
#endif
private:
void repne_scan(Register addr, Register value, Register count, Register tmp);
// Return true if an address is within the 48-bit RISCV64 address space.

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -978,6 +978,9 @@ static void gen_continuation_enter(MacroAssembler* masm,
__ align(NativeInstruction::instruction_size);
const address tr_call = __ trampoline_call(resolve);
if (tr_call == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
oop_maps->add_gc_map(__ pc() - start, map);
__ post_call_nop();
@@ -985,7 +988,10 @@ static void gen_continuation_enter(MacroAssembler* masm,
__ j(exit);
CodeBuffer* cbuf = masm->code_section()->outer();
CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
address stub = CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
if (stub == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
}
// compiled entry
@@ -1005,6 +1011,9 @@ static void gen_continuation_enter(MacroAssembler* masm,
__ align(NativeInstruction::instruction_size);
const address tr_call = __ trampoline_call(resolve);
if (tr_call == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
oop_maps->add_gc_map(__ pc() - start, map);
__ post_call_nop();
@@ -1045,7 +1054,10 @@ static void gen_continuation_enter(MacroAssembler* masm,
}
CodeBuffer* cbuf = masm->code_section()->outer();
CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
address stub = CompiledStaticCall::emit_to_interp_stub(*cbuf, tr_call);
if (stub == nullptr) {
fatal("CodeCache is full at gen_continuation_enter");
}
}
static void gen_continuation_yield(MacroAssembler* masm,