8037821: Account for trampoline stubs when estimating code buffer sizes
Take into account space needed for "trampoline code" used by calls on PPC64.
Reviewed-by: kvn
parent ddc2f91ab4
commit ac75d4fc2f
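For orientation (a sketch, not part of the commit): the change replaces the per-platform free functions size_exception_handler(), size_deopt_handler(), emit_exception_handler(), emit_deopt_handler() and the PPC64 trampoline helpers with two classes, CallStubImpl and HandlerImpl, that every architecture description defines with the same static interface, so shared code in opto/output.cpp can budget code-buffer and relocation space without extern declarations. A minimal self-contained sketch of that pattern follows; the types are simplified, estimate_buffer_size() is a hypothetical stand-in for Compile::shorten_branches()/init_buffer(), and the handler sizes are placeholders (only the trampoline numbers mirror the PPC64 hunks below).

#include <cstddef>

// Platform side: each architecture description defines these classes with
// the same static interface. The trampoline values mimic the PPC64 hunks
// below (6 four-byte instructions, 5 relocation entries); platforms without
// trampolines (SPARC, x86 in this commit) return 0 from both.
class CallStubImpl {
 public:
  static unsigned size_call_trampoline()  { return 6 * 4; }
  static unsigned reloc_call_trampoline() { return 5; }
};

class HandlerImpl {
 public:
  static unsigned size_exception_handler() { return 16; } // placeholder value
  static unsigned size_deopt_handler()     { return 16; } // placeholder value
};

// Shared side: a hypothetical estimator standing in for the accounting that
// Compile::shorten_branches() and Compile::init_buffer() perform in output.cpp.
size_t estimate_buffer_size(size_t n_call_sites, size_t inst_bytes) {
  size_t stub_bytes = 0;
  for (size_t i = 0; i < n_call_sites; i++) {
    // Each call may need one trampoline stub; budget the worst case.
    stub_bytes += CallStubImpl::size_call_trampoline();
  }
  return inst_bytes + stub_bytes
       + HandlerImpl::size_exception_handler()
       + HandlerImpl::size_deopt_handler();
}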
@@ -891,6 +891,13 @@ definitions %{
// This is a block of C++ code which provides values, functions, and
// definitions necessary in the rest of the architecture description.
source_hpp %{
// Header information of the source block.
// Method declarations/definitions which are used outside
// the ad-scope can conveniently be defined here.
//
// To keep related declarations/definitions/uses close together,
// we switch between source %{ }% and source_hpp %{ }% freely as needed.

// Returns true if Node n is followed by a MemBar node that
// will do an acquire. If so, this node must not do the acquire
// operation.
@@ -1114,6 +1121,40 @@ static inline void emit_long(CodeBuffer &cbuf, int value) {

//=============================================================================

%} // interrupt source

source_hpp %{ // Header information of the source block.

//--------------------------------------------------------------
//---< Used for optimization in Compile::Shorten_branches >---
//--------------------------------------------------------------

const uint trampoline_stub_size = 6 * BytesPerInstWord;

class CallStubImpl {

 public:

  static void emit_trampoline_stub(MacroAssembler &_masm, int destination_toc_offset, int insts_call_instruction_offset);

  // Size of call trampoline stub.
  // This doesn't need to be accurate to the byte, but it
  // must be larger than or equal to the real size of the stub.
  static uint size_call_trampoline() {
    return trampoline_stub_size;
  }

  // number of relocations needed by a call trampoline stub
  static uint reloc_call_trampoline() {
    return 5;
  }

};

%} // end source_hpp

source %{

// Emit a trampoline stub for a call to a target which is too far away.
//
// code sequences:
@@ -1125,9 +1166,7 @@ static inline void emit_long(CodeBuffer &cbuf, int value) {
// load the call target from the constant pool
// branch via CTR (LR/link still points to the call-site above)

const uint trampoline_stub_size = 6 * BytesPerInstWord;

void emit_trampoline_stub(MacroAssembler &_masm, int destination_toc_offset, int insts_call_instruction_offset) {
void CallStubImpl::emit_trampoline_stub(MacroAssembler &_masm, int destination_toc_offset, int insts_call_instruction_offset) {
  // Start the stub.
  address stub = __ start_a_stub(Compile::MAX_stubs_size/2);
  if (stub == NULL) {
@@ -1170,19 +1209,6 @@ void emit_trampoline_stub(MacroAssembler &_masm, int destination_toc_offset, int
  __ end_a_stub();
}

// Size of trampoline stub, this doesn't need to be accurate but it must
// be larger or equal to the real size of the stub.
// Used for optimization in Compile::Shorten_branches.
uint size_call_trampoline() {
  return trampoline_stub_size;
}

// Number of relocation entries needed by trampoline stub.
// Used for optimization in Compile::Shorten_branches.
uint reloc_call_trampoline() {
  return 5;
}

//=============================================================================

// Emit an inline branch-and-link call and a related trampoline stub.
@@ -1221,7 +1247,7 @@ EmitCallOffsets emit_call_with_trampoline_stub(MacroAssembler &_masm, address en
  const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);

  // Emit the trampoline stub which will be related to the branch-and-link below.
  emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
  CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
  __ relocate(rtype);
}

@@ -2023,17 +2049,34 @@ uint MachUEPNode::size(PhaseRegAlloc *ra_) const {

//=============================================================================

uint size_exception_handler() {
  // The exception_handler is a b64_patchable.
  return MacroAssembler::b64_patchable_size;
}
%} // interrupt source

uint size_deopt_handler() {
  // The deopt_handler is a bl64_patchable.
  return MacroAssembler::bl64_patchable_size;
}
source_hpp %{ // Header information of the source block.

int emit_exception_handler(CodeBuffer &cbuf) {
class HandlerImpl {

 public:

  static int emit_exception_handler(CodeBuffer &cbuf);
  static int emit_deopt_handler(CodeBuffer& cbuf);

  static uint size_exception_handler() {
    // The exception_handler is a b64_patchable.
    return MacroAssembler::b64_patchable_size;
  }

  static uint size_deopt_handler() {
    // The deopt_handler is a bl64_patchable.
    return MacroAssembler::bl64_patchable_size;
  }

};

%} // end source_hpp

source %{

int HandlerImpl::emit_exception_handler(CodeBuffer &cbuf) {
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(size_exception_handler());
@@ -2050,7 +2093,7 @@ int emit_exception_handler(CodeBuffer &cbuf) {

// The deopt_handler is like the exception handler, but it calls to
// the deoptimization blob instead of jumping to the exception blob.
int emit_deopt_handler(CodeBuffer& cbuf) {
int HandlerImpl::emit_deopt_handler(CodeBuffer& cbuf) {
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(size_deopt_handler());
@@ -3438,7 +3481,7 @@ encode %{
    const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);

    // Emit the trampoline stub which will be related to the branch-and-link below.
    emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
    CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
    __ relocate(_optimized_virtual ?
                relocInfo::opt_virtual_call_type : relocInfo::static_call_type);
  }
@@ -3481,7 +3524,7 @@ encode %{
    const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);

    // Emit the trampoline stub which will be related to the branch-and-link below.
    emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
    CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
    assert(_optimized_virtual, "methodHandle call should be a virtual call");
    __ relocate(relocInfo::opt_virtual_call_type);
  }
@@ -3531,7 +3574,7 @@ encode %{
    const address entry_point = !($meth$$method) ? 0 : (address)$meth$$method;
    const address entry_point_const = __ address_constant(entry_point, RelocationHolder::none);
    const int entry_point_const_toc_offset = __ offset_to_method_toc(entry_point_const);
    emit_trampoline_stub(_masm, entry_point_const_toc_offset, __ offset());
    CallStubImpl::emit_trampoline_stub(_masm, entry_point_const_toc_offset, __ offset());

    if (ra_->C->env()->failing())
      return;
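A small aside on the budget above (a sketch, not part of the commit): on PPC64 the worst case per call site is trampoline_stub_size = 6 * BytesPerInstWord = 24 bytes of stub space plus 5 relocation entries, so the extra space a method needs is just those constants multiplied by its number of MachCall nodes. A tiny sketch of that arithmetic, with the call-site count chosen arbitrarily:

#include <cstdio>

int main() {
  const unsigned BytesPerInstWord     = 4;                    // PPC64 instructions are 4 bytes
  const unsigned trampoline_stub_size = 6 * BytesPerInstWord; // 24, as in the hunk above
  const unsigned relocs_per_stub      = 5;                    // CallStubImpl::reloc_call_trampoline()
  const unsigned n_call_sites         = 17;                   // arbitrary example value

  std::printf("extra stub bytes:  %u\n", n_call_sites * trampoline_stub_size); // 408
  std::printf("extra relocations: %u\n", n_call_sites * relocs_per_stub);      // 85
  return 0;
}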
@@ -34,6 +34,7 @@
#include "runtime/sharedRuntime.hpp"
#include "runtime/vframeArray.hpp"
#include "vmreg_ppc.inline.hpp"
#include "adfiles/ad_ppc_64.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif
@@ -52,10 +53,6 @@
#define BIND(label) bind(label); BLOCK_COMMENT(#label ":")


// Used by generate_deopt_blob. Defined in .ad file.
extern uint size_deopt_handler();


class RegisterSaver {
 // Used for saving volatile registers.
 public:
@@ -2782,7 +2779,7 @@ void SharedRuntime::generate_deopt_blob() {
  // We can't grab a free register here, because all registers may
  // contain live values, so let the RegisterSaver do the adjustment
  // of the return pc.
  const int return_pc_adjustment_no_exception = -size_deopt_handler();
  const int return_pc_adjustment_no_exception = -HandlerImpl::size_deopt_handler();

  // Push the "unpack frame"
  // Save everything in sight.
@@ -457,6 +457,13 @@ definitions %{
// This is a block of C++ code which provides values, functions, and
// definitions necessary in the rest of the architecture description
source_hpp %{
// Header information of the source block.
// Method declarations/definitions which are used outside
// the ad-scope can conveniently be defined here.
//
// To keep related declarations/definitions/uses close together,
// we switch between source %{ }% and source_hpp %{ }% freely as needed.

// Must be visible to the DFA in dfa_sparc.cpp
extern bool can_branch_register( Node *bol, Node *cmp );

@@ -468,6 +475,46 @@ extern bool use_block_zeroing(Node* count);
#define LONG_HI_REG(x) (x)
#define LONG_LO_REG(x) (x)

class CallStubImpl {

  //--------------------------------------------------------------
  //---< Used for optimization in Compile::Shorten_branches >---
  //--------------------------------------------------------------

 public:
  // Size of call trampoline stub.
  static uint size_call_trampoline() {
    return 0; // no call trampolines on this platform
  }

  // number of relocations needed by a call trampoline stub
  static uint reloc_call_trampoline() {
    return 0; // no call trampolines on this platform
  }
};

class HandlerImpl {

 public:

  static int emit_exception_handler(CodeBuffer &cbuf);
  static int emit_deopt_handler(CodeBuffer& cbuf);

  static uint size_exception_handler() {
    if (TraceJumps) {
      return (400); // just a guess
    }
    return ( NativeJump::instruction_size ); // sethi;jmp;nop
  }

  static uint size_deopt_handler() {
    if (TraceJumps) {
      return (400); // just a guess
    }
    return ( 4+ NativeJump::instruction_size ); // save;sethi;jmp;restore
  }
};

%}

source %{
@@ -1710,22 +1757,9 @@ uint MachUEPNode::size(PhaseRegAlloc *ra_) const {

//=============================================================================

uint size_exception_handler() {
  if (TraceJumps) {
    return (400); // just a guess
  }
  return ( NativeJump::instruction_size ); // sethi;jmp;nop
}

uint size_deopt_handler() {
  if (TraceJumps) {
    return (400); // just a guess
  }
  return ( 4+ NativeJump::instruction_size ); // save;sethi;jmp;restore
}

// Emit exception handler code.
int emit_exception_handler(CodeBuffer& cbuf) {
int HandlerImpl::emit_exception_handler(CodeBuffer& cbuf) {
  Register temp_reg = G3;
  AddressLiteral exception_blob(OptoRuntime::exception_blob()->entry_point());
  MacroAssembler _masm(&cbuf);
@@ -1746,7 +1780,7 @@ int emit_exception_handler(CodeBuffer& cbuf) {
  return offset;
}

int emit_deopt_handler(CodeBuffer& cbuf) {
int HandlerImpl::emit_deopt_handler(CodeBuffer& cbuf) {
  // Can't use any of the current frame's registers as we may have deopted
  // at a poll and everything (including G3) can be live.
  Register temp_reg = L0;
@@ -474,7 +474,125 @@ reg_class vectory_reg(XMM0, XMM0b, XMM0c, XMM0d, XMM0e, XMM0f, XMM0g, XMM

%}


//----------SOURCE BLOCK-------------------------------------------------------
// This is a block of C++ code which provides values, functions, and
// definitions necessary in the rest of the architecture description

source_hpp %{
// Header information of the source block.
// Method declarations/definitions which are used outside
// the ad-scope can conveniently be defined here.
//
// To keep related declarations/definitions/uses close together,
// we switch between source %{ }% and source_hpp %{ }% freely as needed.

class CallStubImpl {

  //--------------------------------------------------------------
  //---< Used for optimization in Compile::shorten_branches >---
  //--------------------------------------------------------------

 public:
  // Size of call trampoline stub.
  static uint size_call_trampoline() {
    return 0; // no call trampolines on this platform
  }

  // number of relocations needed by a call trampoline stub
  static uint reloc_call_trampoline() {
    return 0; // no call trampolines on this platform
  }
};

class HandlerImpl {

 public:

  static int emit_exception_handler(CodeBuffer &cbuf);
  static int emit_deopt_handler(CodeBuffer& cbuf);

  static uint size_exception_handler() {
    // NativeCall instruction size is the same as NativeJump.
    // exception handler starts out as jump and can be patched to
    // a call be deoptimization. (4932387)
    // Note that this value is also credited (in output.cpp) to
    // the size of the code section.
    return NativeJump::instruction_size;
  }

#ifdef _LP64
  static uint size_deopt_handler() {
    // three 5 byte instructions
    return 15;
  }
#else
  static uint size_deopt_handler() {
    // NativeCall instruction size is the same as NativeJump.
    // exception handler starts out as jump and can be patched to
    // a call be deoptimization. (4932387)
    // Note that this value is also credited (in output.cpp) to
    // the size of the code section.
    return 5 + NativeJump::instruction_size; // pushl(); jmp;
  }
#endif
};

%} // end source_hpp

source %{

// Emit exception handler code.
// Stuff framesize into a register and call a VM stub routine.
int HandlerImpl::emit_exception_handler(CodeBuffer& cbuf) {

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base = __ start_a_stub(size_exception_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();
  __ jump(RuntimeAddress(OptoRuntime::exception_blob()->entry_point()));
  assert(__ offset() - offset <= (int) size_exception_handler(), "overflow");
  __ end_a_stub();
  return offset;
}

// Emit deopt handler code.
int HandlerImpl::emit_deopt_handler(CodeBuffer& cbuf) {

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base = __ start_a_stub(size_deopt_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();

#ifdef _LP64
  address the_pc = (address) __ pc();
  Label next;
  // push a "the_pc" on the stack without destroying any registers
  // as they all may be live.

  // push address of "next"
  __ call(next, relocInfo::none); // reloc none is fine since it is a disp32
  __ bind(next);
  // adjust it so it matches "the_pc"
  __ subptr(Address(rsp, 0), __ offset() - offset);
#else
  InternalAddress here(__ pc());
  __ pushptr(here.addr());
#endif

  __ jump(RuntimeAddress(SharedRuntime::deopt_blob()->unpack()));
  assert(__ offset() - offset <= (int) size_deopt_handler(), "overflow");
  __ end_a_stub();
  return offset;
}


//=============================================================================

// Float masks come from different places depending on platform.
#ifdef _LP64
static address float_signmask() { return StubRoutines::x86::float_sign_mask(); }
@@ -1297,59 +1297,6 @@ uint MachUEPNode::size(PhaseRegAlloc *ra_) const {


//=============================================================================
uint size_exception_handler() {
  // NativeCall instruction size is the same as NativeJump.
  // exception handler starts out as jump and can be patched to
  // a call be deoptimization. (4932387)
  // Note that this value is also credited (in output.cpp) to
  // the size of the code section.
  return NativeJump::instruction_size;
}

// Emit exception handler code. Stuff framesize into a register
// and call a VM stub routine.
int emit_exception_handler(CodeBuffer& cbuf) {

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base =
      __ start_a_stub(size_exception_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();
  __ jump(RuntimeAddress(OptoRuntime::exception_blob()->entry_point()));
  assert(__ offset() - offset <= (int) size_exception_handler(), "overflow");
  __ end_a_stub();
  return offset;
}

uint size_deopt_handler() {
  // NativeCall instruction size is the same as NativeJump.
  // exception handler starts out as jump and can be patched to
  // a call be deoptimization. (4932387)
  // Note that this value is also credited (in output.cpp) to
  // the size of the code section.
  return 5 + NativeJump::instruction_size; // pushl(); jmp;
}

// Emit deopt handler code.
int emit_deopt_handler(CodeBuffer& cbuf) {

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base =
      __ start_a_stub(size_exception_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();
  InternalAddress here(__ pc());
  __ pushptr(here.addr());

  __ jump(RuntimeAddress(SharedRuntime::deopt_blob()->unpack()));
  assert(__ offset() - offset <= (int) size_deopt_handler(), "overflow");
  __ end_a_stub();
  return offset;
}

int Matcher::regnum_to_fpu_offset(int regnum) {
  return regnum - 32; // The FP registers are in the second chunk
@@ -1439,66 +1439,9 @@ uint MachUEPNode::size(PhaseRegAlloc* ra_) const
  return MachNode::size(ra_); // too many variables; just compute it
                              // the hard way
}


//=============================================================================
uint size_exception_handler()
{
  // NativeCall instruction size is the same as NativeJump.
  // Note that this value is also credited (in output.cpp) to
  // the size of the code section.
  return NativeJump::instruction_size;
}

// Emit exception handler code.
int emit_exception_handler(CodeBuffer& cbuf)
{

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base =
      __ start_a_stub(size_exception_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();
  __ jump(RuntimeAddress(OptoRuntime::exception_blob()->entry_point()));
  assert(__ offset() - offset <= (int) size_exception_handler(), "overflow");
  __ end_a_stub();
  return offset;
}

uint size_deopt_handler()
{
  // three 5 byte instructions
  return 15;
}

// Emit deopt handler code.
int emit_deopt_handler(CodeBuffer& cbuf)
{

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base =
      __ start_a_stub(size_deopt_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();
  address the_pc = (address) __ pc();
  Label next;
  // push a "the_pc" on the stack without destroying any registers
  // as they all may be live.

  // push address of "next"
  __ call(next, relocInfo::none); // reloc none is fine since it is a disp32
  __ bind(next);
  // adjust it so it matches "the_pc"
  __ subptr(Address(rsp, 0), __ offset() - offset);
  __ jump(RuntimeAddress(SharedRuntime::deopt_blob()->unpack()));
  assert(__ offset() - offset <= (int) size_deopt_handler(), "overflow");
  __ end_a_stub();
  return offset;
}

int Matcher::regnum_to_fpu_offset(int regnum)
{
@@ -42,18 +42,12 @@
#include "runtime/handles.inline.hpp"
#include "utilities/xmlstream.hpp"

extern uint size_exception_handler();
extern uint size_deopt_handler();

#ifndef PRODUCT
#define DEBUG_ARG(x) , x
#else
#define DEBUG_ARG(x)
#endif

extern int emit_exception_handler(CodeBuffer &cbuf);
extern int emit_deopt_handler(CodeBuffer &cbuf);

// Convert Nodes to instruction bits and pass off to the VM
void Compile::Output() {
  // RootNode goes
@@ -394,6 +388,11 @@ void Compile::shorten_branches(uint* blk_starts, int& code_size, int& reloc_size
      blk_size += (mach->alignment_required() - 1) * relocInfo::addr_unit(); // assume worst case padding
      reloc_size += mach->reloc();
      if (mach->is_MachCall()) {
        // add size information for trampoline stub
        // class CallStubImpl is platform-specific and defined in the *.ad files.
        stub_size  += CallStubImpl::size_call_trampoline();
        reloc_size += CallStubImpl::reloc_call_trampoline();

        MachCallNode *mcall = mach->as_MachCall();
        // This destination address is NOT PC-relative

@@ -1133,10 +1132,9 @@ CodeBuffer* Compile::init_buffer(uint* blk_starts) {
  shorten_branches(blk_starts, code_req, locs_req, stub_req);

  // nmethod and CodeBuffer count stubs & constants as part of method's code.
  int exception_handler_req = size_exception_handler();
  int deopt_handler_req = size_deopt_handler();
  exception_handler_req += MAX_stubs_size; // add marginal slop for handler
  deopt_handler_req += MAX_stubs_size; // add marginal slop for handler
  // class HandlerImpl is platform-specific and defined in the *.ad files.
  int exception_handler_req = HandlerImpl::size_exception_handler() + MAX_stubs_size; // add marginal slop for handler
  int deopt_handler_req = HandlerImpl::size_deopt_handler() + MAX_stubs_size; // add marginal slop for handler
  stub_req += MAX_stubs_size; // ensure per-stub margin
  code_req += MAX_inst_size; // ensure per-instruction margin

@@ -1622,17 +1620,18 @@ void Compile::fill_buffer(CodeBuffer* cb, uint* blk_starts) {
  FillExceptionTables(inct_cnt, call_returns, inct_starts, blk_labels);

  // Only java methods have exception handlers and deopt handlers
  // class HandlerImpl is platform-specific and defined in the *.ad files.
  if (_method) {
    // Emit the exception handler code.
    _code_offsets.set_value(CodeOffsets::Exceptions, emit_exception_handler(*cb));
    _code_offsets.set_value(CodeOffsets::Exceptions, HandlerImpl::emit_exception_handler(*cb));
    // Emit the deopt handler code.
    _code_offsets.set_value(CodeOffsets::Deopt, emit_deopt_handler(*cb));
    _code_offsets.set_value(CodeOffsets::Deopt, HandlerImpl::emit_deopt_handler(*cb));

    // Emit the MethodHandle deopt handler code (if required).
    if (has_method_handle_invokes()) {
      // We can use the same code as for the normal deopt handler, we
      // just need a different entry point address.
      _code_offsets.set_value(CodeOffsets::DeoptMH, emit_deopt_handler(*cb));
      _code_offsets.set_value(CodeOffsets::DeoptMH, HandlerImpl::emit_deopt_handler(*cb));
    }
  }

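A closing usage sketch (not part of the commit): the fill_buffer() hunk above is the only consumer of the new HandlerImpl emitters; each emit_* call returns the offset of the handler it just placed, and the MethodHandle deopt entry simply reuses emit_deopt_handler() a second time. A stripped-down illustration of that calling pattern, with a toy CodeBuffer and string keys standing in for the CodeOffsets enum, and made-up handler sizes:

#include <map>
#include <string>

struct CodeBuffer { int end = 0; };  // toy stand-in, tracks the current end offset

struct HandlerImpl {
  // Each emitter appends a fixed-size handler and returns its start offset,
  // mirroring the contract used by Compile::fill_buffer(). Sizes are made up.
  static int emit_exception_handler(CodeBuffer& cb) { int off = cb.end; cb.end += 16; return off; }
  static int emit_deopt_handler(CodeBuffer& cb)     { int off = cb.end; cb.end += 16; return off; }
};

int main() {
  CodeBuffer cb;
  std::map<std::string, int> code_offsets;
  code_offsets["Exceptions"] = HandlerImpl::emit_exception_handler(cb);
  code_offsets["Deopt"]      = HandlerImpl::emit_deopt_handler(cb);
  // The MethodHandle deopt handler reuses the same code, just a second entry point.
  code_offsets["DeoptMH"]    = HandlerImpl::emit_deopt_handler(cb);
  return 0;
}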