7104960: JSR 292: +VerifyMethodHandles in product JVM can overflow buffer
Reviewed-by: kvn, jrose, twisti
commit 051c64ad4d (parent 0bc2963b7d)
@@ -597,6 +597,10 @@ inline void MacroAssembler::jmp( Register s1, Register s2 ) { jmpl( s1, s2, G0 )
 inline void MacroAssembler::jmp( Register s1, int simm13a, RelocationHolder const& rspec ) { jmpl( s1, simm13a, G0, rspec); }
 
 inline bool MacroAssembler::is_far_target(address d) {
+  if (ForceUnreachable) {
+    // References outside the code cache should be treated as far
+    return d < CodeCache::low_bound() || d > CodeCache::high_bound();
+  }
   return !is_in_wdisp30_range(d, CodeCache::low_bound()) || !is_in_wdisp30_range(d, CodeCache::high_bound());
 }
 
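Note: the hunk above teaches MacroAssembler::is_far_target() that, with ForceUnreachable set, any address outside the code cache is treated as far, even when a 30-bit word-displacement branch could physically reach it. A minimal standalone sketch of that decision follows; the flag, the code-cache bounds and the displacement check are made-up stand-ins for illustration, not HotSpot code.

  // far_target_model.cpp : toy model of the patched decision (assumes a 64-bit host).
  #include <cstdint>
  #include <iostream>

  static bool force_unreachable = true;             // models -XX:+ForceUnreachable
  static const uint64_t cc_low  = 0x100000000ULL;   // hypothetical CodeCache::low_bound()
  static const uint64_t cc_high = 0x104000000ULL;   // hypothetical CodeCache::high_bound()

  // Simplified stand-in for is_in_wdisp30_range(): a SPARC branch with a 30-bit
  // word displacement reaches roughly +/- 2 GB from its own address.
  static bool in_wdisp30_range(uint64_t target, uint64_t base) {
    int64_t d = (int64_t)target - (int64_t)base;
    return d >= -(int64_t(1) << 31) && d < (int64_t(1) << 31);
  }

  // Shape of the patched is_far_target(): under the flag, everything outside the
  // code cache counts as far; otherwise plain branch reachability decides.
  static bool is_far_target(uint64_t d) {
    if (force_unreachable) {
      return d < cc_low || d > cc_high;
    }
    return !in_wdisp30_range(d, cc_low) || !in_wdisp30_range(d, cc_high);
  }

  int main() {
    uint64_t just_outside = cc_high + 0x1000;       // branch-reachable, but outside
    std::cout << std::boolalpha << is_far_target(just_outside) << "\n";  // true
    force_unreachable = false;
    std::cout << is_far_target(just_outside) << "\n";                    // false
  }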
@@ -679,28 +683,44 @@ inline intptr_t MacroAssembler::load_pc_address( Register reg, int bytes_to_skip
 
 inline void MacroAssembler::load_contents(const AddressLiteral& addrlit, Register d, int offset) {
   assert_not_delayed();
-  sethi(addrlit, d);
+  if (ForceUnreachable) {
+    patchable_sethi(addrlit, d);
+  } else {
+    sethi(addrlit, d);
+  }
   ld(d, addrlit.low10() + offset, d);
 }
 
 
 inline void MacroAssembler::load_ptr_contents(const AddressLiteral& addrlit, Register d, int offset) {
   assert_not_delayed();
-  sethi(addrlit, d);
+  if (ForceUnreachable) {
+    patchable_sethi(addrlit, d);
+  } else {
+    sethi(addrlit, d);
+  }
   ld_ptr(d, addrlit.low10() + offset, d);
 }
 
 
 inline void MacroAssembler::store_contents(Register s, const AddressLiteral& addrlit, Register temp, int offset) {
   assert_not_delayed();
-  sethi(addrlit, temp);
+  if (ForceUnreachable) {
+    patchable_sethi(addrlit, temp);
+  } else {
+    sethi(addrlit, temp);
+  }
   st(s, temp, addrlit.low10() + offset);
 }
 
 
 inline void MacroAssembler::store_ptr_contents(Register s, const AddressLiteral& addrlit, Register temp, int offset) {
   assert_not_delayed();
-  sethi(addrlit, temp);
+  if (ForceUnreachable) {
+    patchable_sethi(addrlit, temp);
+  } else {
+    sethi(addrlit, temp);
+  }
   st_ptr(s, temp, addrlit.low10() + offset);
 }
 
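Note: the load/store helpers above now emit patchable_sethi() instead of sethi() when ForceUnreachable is on. The point of the patchable form is that it reserves the worst-case instruction sequence for the constant, so the site can later be patched to any address and its size does not depend on the particular value. A rough standalone sketch of that difference, using invented instruction counts rather than the real SPARC sequences:

  // set_constant_model.cpp : value-dependent vs. worst-case emission (illustrative only).
  #include <cstdint>
  #include <iostream>

  // Optimized emission: smaller values take fewer instructions.
  static int emit_set_optimized(uint64_t value) {
    if (value < (uint64_t(1) << 22)) return 1;   // e.g. a single sethi-like insn
    if (value < (uint64_t(1) << 32)) return 2;
    return 6;                                    // full 64-bit sequence
  }

  // Patchable emission: always reserve the full sequence, so the value can be
  // patched later and code size never depends on the address handed in.
  static int emit_set_patchable(uint64_t /*value*/) {
    return 6;
  }

  int main() {
    uint64_t small = 0x1000, large = 0x7f0000001000ULL;
    std::cout << "optimized: " << emit_set_optimized(small) << " vs "
              << emit_set_optimized(large) << " instructions\n";  // varies with the value
    std::cout << "patchable: " << emit_set_patchable(small) << " vs "
              << emit_set_patchable(large) << " instructions\n";  // constant
  }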
@@ -352,6 +352,7 @@ void MethodHandles::load_stack_move(MacroAssembler* _masm,
   BLOCK_COMMENT("load_stack_move {");
   __ ldsw(G3_amh_conversion, stack_move_reg);
   __ sra(stack_move_reg, CONV_STACK_MOVE_SHIFT, stack_move_reg);
+#ifdef ASSERT
   if (VerifyMethodHandles) {
     Label L_ok, L_bad;
     int32_t stack_move_limit = 0x0800;  // extra-large
@@ -363,6 +364,7 @@ void MethodHandles::load_stack_move(MacroAssembler* _masm,
     __ stop("load_stack_move of garbage value");
     __ BIND(L_ok);
   }
+#endif
   BLOCK_COMMENT("} load_stack_move");
 }
 
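Note: the two hunks above are the heart of the 7104960 fix on SPARC. The VerifyMethodHandles checks are now compiled only under #ifdef ASSERT, so a product JVM started with -XX:+VerifyMethodHandles can no longer emit verification code that the product-sized adapter buffer never budgeted for. A compressed sketch of the resulting compile-time plus run-time guard, with a stand-in emitter:

  // verify_guard_model.cpp : models the double guard added above.
  // Build with -DASSERT to mimic a debug JVM; without it the verification
  // emitter is not even compiled, mirroring a product build.
  #include <iostream>

  static bool VerifyMethodHandles = true;   // the run-time flag, requested by the user
  static int  emitted_words = 0;            // stand-in for code-buffer usage

  static void emit_stack_move_load()   { emitted_words += 2; }
  static void emit_stack_move_checks() { emitted_words += 8; }  // the "extra" code

  static void load_stack_move() {
    emit_stack_move_load();
  #ifdef ASSERT
    if (VerifyMethodHandles) {
      emit_stack_move_checks();             // only reachable in debug builds now
    }
  #endif
  }

  int main() {
    load_stack_move();
    std::cout << "emitted " << emitted_words << " words\n";  // 2 in product, 10 in debug
  }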
@@ -27,7 +27,7 @@
 
 // Adapters
 enum /* platform_dependent_constants */ {
-  adapter_code_size = NOT_LP64(22000 DEBUG_ONLY(+ 40000)) LP64_ONLY(32000 DEBUG_ONLY(+ 80000))
+  adapter_code_size = NOT_LP64(23000 DEBUG_ONLY(+ 40000)) LP64_ONLY(35000 DEBUG_ONLY(+ 50000))
 };
 
 public:
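Note: adapter_code_size is assembled from the NOT_LP64 / LP64_ONLY / DEBUG_ONLY selector macros (32-bit vs. 64-bit, debug vs. product), so the single expression above encodes four budgets. Assuming the usual HotSpot meaning of those selectors, the new SPARC numbers work out as in this small sketch:

  // adapter_budget.cpp : the four budgets encoded by
  // NOT_LP64(23000 DEBUG_ONLY(+ 40000)) LP64_ONLY(35000 DEBUG_ONLY(+ 50000)).
  #include <iostream>

  int main() {
    std::cout << "32-bit product: " << 23000         << "\n";
    std::cout << "32-bit debug:   " << 23000 + 40000 << "\n";  // 63000
    std::cout << "64-bit product: " << 35000         << "\n";
    std::cout << "64-bit debug:   " << 35000 + 50000 << "\n";  // 85000
  }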
@@ -382,6 +382,7 @@ void MethodHandles::load_stack_move(MacroAssembler* _masm,
     __ movslq(rdi_stack_move, rdi_stack_move);
   }
 #endif //_LP64
+#ifdef ASSERT
   if (VerifyMethodHandles) {
     Label L_ok, L_bad;
     int32_t stack_move_limit = 0x4000;  // extra-large
@@ -393,6 +394,7 @@ void MethodHandles::load_stack_move(MacroAssembler* _masm,
     __ stop("load_stack_move of garbage value");
     __ BIND(L_ok);
   }
+#endif
   BLOCK_COMMENT("} load_stack_move");
 }
 
@@ -27,7 +27,7 @@
 
 // Adapters
 enum /* platform_dependent_constants */ {
-  adapter_code_size = NOT_LP64(30000 DEBUG_ONLY(+ 10000)) LP64_ONLY(80000 DEBUG_ONLY(+ 120000))
+  adapter_code_size = NOT_LP64(16000 DEBUG_ONLY(+ 15000)) LP64_ONLY(32000 DEBUG_ONLY(+ 80000))
 };
 
 public:
@@ -26,6 +26,7 @@
 #include "asm/codeBuffer.hpp"
 #include "compiler/disassembler.hpp"
 #include "utilities/copy.hpp"
+#include "utilities/xmlstream.hpp"
 
 // The structure of a CodeSection:
 //
@@ -81,7 +82,7 @@ typedef CodeBuffer::csize_t csize_t;  // file-local definition
 CodeBuffer::CodeBuffer(CodeBlob* blob) {
   initialize_misc("static buffer");
   initialize(blob->content_begin(), blob->content_size());
-  assert(verify_section_allocation(), "initial use of buffer OK");
+  verify_section_allocation();
 }
 
 void CodeBuffer::initialize(csize_t code_size, csize_t locs_size) {
@@ -108,17 +109,18 @@ void CodeBuffer::initialize(csize_t code_size, csize_t locs_size) {
     _insts.initialize_locs(locs_size / sizeof(relocInfo));
   }
 
-  assert(verify_section_allocation(), "initial use of blob is OK");
+  verify_section_allocation();
 }
 
 
 CodeBuffer::~CodeBuffer() {
+  verify_section_allocation();
+
   // If we allocate our code buffer from the CodeCache
   // via a BufferBlob, and it's not permanent, then
   // free the BufferBlob.
   // The rest of the memory will be freed when the ResourceObj
   // is released.
-  assert(verify_section_allocation(), "final storage configuration still OK");
   for (CodeBuffer* cb = this; cb != NULL; cb = cb->before_expand()) {
     // Previous incarnations of this buffer are held live, so that internal
     // addresses constructed before expansions will not be confused.
@@ -484,7 +486,7 @@ void CodeBuffer::compute_final_layout(CodeBuffer* dest) const {
 
   // Done calculating sections; did it come out to the right end?
   assert(buf_offset == total_content_size(), "sanity");
-  assert(dest->verify_section_allocation(), "final configuration works");
+  dest->verify_section_allocation();
 }
 
 csize_t CodeBuffer::total_offset_of(CodeSection* cs) const {
@@ -810,7 +812,7 @@ void CodeBuffer::expand(CodeSection* which_cs, csize_t amount) {
   _decode_begin = NULL;  // sanity
 
   // Make certain that the new sections are all snugly inside the new blob.
-  assert(verify_section_allocation(), "expanded allocation is ship-shape");
+  verify_section_allocation();
 
 #ifndef PRODUCT
   if (PrintNMethods && (WizardMode || Verbose)) {
@@ -839,35 +841,48 @@ void CodeBuffer::take_over_code_from(CodeBuffer* cb) {
   DEBUG_ONLY(cb->_blob = (BufferBlob*)badAddress);
 }
 
-#ifdef ASSERT
-bool CodeBuffer::verify_section_allocation() {
+void CodeBuffer::verify_section_allocation() {
   address tstart = _total_start;
-  if (tstart == badAddress)  return true;  // smashed by set_blob(NULL)
+  if (tstart == badAddress)  return;  // smashed by set_blob(NULL)
   address tend   = tstart + _total_size;
   if (_blob != NULL) {
-    assert(tstart >= _blob->content_begin(), "sanity");
-    assert(tend   <= _blob->content_end(),   "sanity");
+    guarantee(tstart >= _blob->content_begin(), "sanity");
+    guarantee(tend   <= _blob->content_end(),   "sanity");
   }
   // Verify disjointness.
   for (int n = (int) SECT_FIRST; n < (int) SECT_LIMIT; n++) {
     CodeSection* sect = code_section(n);
     if (!sect->is_allocated() || sect->is_empty())  continue;
-    assert((intptr_t)sect->start() % sect->alignment() == 0
+    guarantee((intptr_t)sect->start() % sect->alignment() == 0
            || sect->is_empty() || _blob == NULL,
            "start is aligned");
     for (int m = (int) SECT_FIRST; m < (int) SECT_LIMIT; m++) {
       CodeSection* other = code_section(m);
       if (!other->is_allocated() || other == sect)  continue;
-      assert(!other->contains(sect->start()    ), "sanity");
+      guarantee(!other->contains(sect->start()    ), "sanity");
       // limit is an exclusive address and can be the start of another
       // section.
-      assert(!other->contains(sect->limit() - 1), "sanity");
+      guarantee(!other->contains(sect->limit() - 1), "sanity");
     }
-    assert(sect->end() <= tend, "sanity");
+    guarantee(sect->end() <= tend, "sanity");
+    guarantee(sect->end() <= sect->limit(), "sanity");
+  }
+}
+
+void CodeBuffer::log_section_sizes(const char* name) {
+  if (xtty != NULL) {
+    // log info about buffer usage
+    xtty->print_cr("<blob name='%s' size='%d'>", name, _total_size);
+    for (int n = (int) CodeBuffer::SECT_FIRST; n < (int) CodeBuffer::SECT_LIMIT; n++) {
+      CodeSection* sect = code_section(n);
+      if (!sect->is_allocated() || sect->is_empty())  continue;
+      xtty->print_cr("<sect index='%d' size='" SIZE_FORMAT "' free='" SIZE_FORMAT "'/>",
+                     n, sect->limit() - sect->start(), sect->limit() - sect->end());
+    }
+    xtty->print_cr("</blob>");
   }
-  return true;
 }
-#endif //ASSERT
 
 #ifndef PRODUCT
 
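Note: verify_section_allocation() above loses its #ifdef ASSERT wrapper and bool return and switches from assert() to guarantee(). In HotSpot, assert() is compiled away in product builds while guarantee() keeps checking, so a section that overruns its buffer now stops a product VM instead of silently corrupting memory; the new log_section_sizes() records per-section usage on the XML log stream when xtty is active. A minimal sketch of the difference in check strength, with simplified stand-ins for the real debug.hpp macros:

  // check_strength_model.cpp : simplified assert/guarantee stand-ins.
  // Build with or without -DASSERT to see which check survives a product build.
  #include <cstdio>
  #include <cstdlib>

  #ifdef ASSERT
  #define my_assert(cond, msg)    do { if (!(cond)) { std::puts(msg); std::abort(); } } while (0)
  #else
  #define my_assert(cond, msg)    ((void)0)  /* compiled away, like product assert() */
  #endif
  #define my_guarantee(cond, msg) do { if (!(cond)) { std::puts(msg); std::abort(); } } while (0)

  int main() {
    int section_end = 120, section_limit = 100;   // pretend a section overflowed
    my_assert(section_end <= section_limit, "debug-only check");      // silent in product
    my_guarantee(section_end <= section_limit, "checked everywhere"); // always aborts here
  }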
@@ -895,7 +910,6 @@ void CodeBuffer::block_comment(intptr_t offset, const char * comment) {
   _comments.add_comment(offset, comment);
 }
 
-
 class CodeComment: public CHeapObj {
  private:
   friend class CodeComments;
@@ -362,10 +362,8 @@ class CodeBuffer: public StackObj {
   // helper for CodeBuffer::expand()
   void take_over_code_from(CodeBuffer* cs);
 
-#ifdef ASSERT
   // ensure sections are disjoint, ordered, and contained in the blob
-  bool verify_section_allocation();
-#endif
+  void verify_section_allocation();
 
   // copies combined relocations to the blob, returns bytes copied
   // (if target is null, it is a dry run only, just for sizing)
@@ -393,7 +391,7 @@ class CodeBuffer: public StackObj {
     assert(code_start != NULL, "sanity");
     initialize_misc("static buffer");
     initialize(code_start, code_size);
-    assert(verify_section_allocation(), "initial use of buffer OK");
+    verify_section_allocation();
   }
 
   // (2) CodeBuffer referring to pre-allocated CodeBlob.
@@ -545,6 +543,9 @@ class CodeBuffer: public StackObj {
 
   void block_comment(intptr_t offset, const char * comment) PRODUCT_RETURN;
 
+  // Log a little info about section usage in the CodeBuffer
+  void log_section_sizes(const char* name);
+
 #ifndef PRODUCT
  public:
   // Printing / Decoding
@@ -206,9 +206,12 @@ void MethodHandles::generate_adapters() {
   _adapter_code = MethodHandlesAdapterBlob::create(adapter_code_size);
   if (_adapter_code == NULL)
     vm_exit_out_of_memory(adapter_code_size, "CodeCache: no room for MethodHandles adapters");
-  CodeBuffer code(_adapter_code);
-  MethodHandlesAdapterGenerator g(&code);
-  g.generate();
+  {
+    CodeBuffer code(_adapter_code);
+    MethodHandlesAdapterGenerator g(&code);
+    g.generate();
+    code.log_section_sizes("MethodHandlesAdapterBlob");
+  }
 }
 
 //------------------------------------------------------------------------------
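Note: generate_adapters() now builds the CodeBuffer inside its own block, so the buffer's destructor, which after this change runs verify_section_allocation(), fires as soon as generation finishes, and code.log_section_sizes("MethodHandlesAdapterBlob") reports how full each section got. A small RAII sketch of why the extra braces matter; BufferGuard is a made-up stand-in for CodeBuffer:

  // scope_guard_model.cpp : the checks run when the object leaves scope,
  // i.e. immediately after generation rather than at some later point.
  #include <iostream>

  struct BufferGuard {
    ~BufferGuard() { std::cout << "verify + log section sizes\n"; }  // models ~CodeBuffer()
  };

  static void generate() { std::cout << "generate adapters\n"; }

  int main() {
    {
      BufferGuard code;   // models CodeBuffer code(_adapter_code);
      generate();
    }                     // destructor fires here
    std::cout << "back in the caller\n";
  }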
@@ -577,8 +577,8 @@ class CommandLineFlags {
   develop(bool, VerifyStack, false,                                          \
           "Verify stack of each thread when it is entering a runtime call")  \
                                                                              \
-  develop(bool, ForceUnreachable, false,                                     \
-          "(amd64) Make all non code cache addresses to be unreachable with rip-rel forcing use of 64bit literal fixups") \
+  diagnostic(bool, ForceUnreachable, false,                                  \
+          "Make all non code cache addresses to be unreachable with forcing use of 64bit literal fixups") \
                                                                              \
   notproduct(bool, StressDerivedPointers, false,                             \
           "Force scavenge when a derived pointers is detected on stack "     \
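Note: ForceUnreachable moves from a develop flag (changeable only in debug builds) to a diagnostic flag, and its description drops the "(amd64)" qualifier now that the SPARC paths above honor it as well. As a diagnostic flag it should normally require -XX:+UnlockDiagnosticVMOptions -XX:+ForceUnreachable to be enabled on a product VM.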