8036146: make CPP interpreter build again
Fix build of CPP interpreter on x86 and sparc
Reviewed-by: kvn
parent 7f243a6751
commit 7bee30f699
@@ -413,16 +413,15 @@ void InterpreterGenerator::generate_counter_incr(Label* overflow, Label* profile
// Update standard invocation counters
__ increment_invocation_counter(Rcounters, O0, G4_scratch);
if (ProfileInterpreter) {
- Address interpreter_invocation_counter(Rcounters, 0,
+ Address interpreter_invocation_counter(Rcounters,
in_bytes(MethodCounters::interpreter_invocation_counter_offset()));
__ ld(interpreter_invocation_counter, G4_scratch);
__ inc(G4_scratch);
__ st(G4_scratch, interpreter_invocation_counter);
}

- Address invocation_limit(G3_scratch, (address)&InvocationCounter::InterpreterInvocationLimit);
- __ sethi(invocation_limit);
- __ ld(invocation_limit, G3_scratch);
+ AddressLiteral invocation_limit((address)&InvocationCounter::InterpreterInvocationLimit);
+ __ load_contents(invocation_limit, G3_scratch);
__ cmp(O0, G3_scratch);
__ br(Assembler::greaterEqualUnsigned, false, Assembler::pn, *overflow);
__ delayed()->nop();
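For orientation, the counter check this hunk touches amounts to the following; a minimal plain-C++ sketch with illustrative names, not HotSpot API:

```cpp
#include <cstdint>

// Purely illustrative: bump the per-method invocation counter and report
// whether it has reached InterpreterInvocationLimit, which is where the stub
// above branches to *overflow. Names are hypothetical stand-ins.
static bool increment_and_check_overflow(uint32_t* invocation_counter,
                                         uint32_t interpreter_invocation_limit) {
  *invocation_counter += 1;  // __ increment_invocation_counter(Rcounters, O0, G4_scratch)
  return *invocation_counter >= interpreter_invocation_limit;  // br(greaterEqualUnsigned, ..., *overflow)
}
```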
@@ -439,7 +438,7 @@ address InterpreterGenerator::generate_empty_entry(void) {
// do nothing for empty methods (do not even increment invocation counter)
if ( UseFastEmptyMethods) {
// If we need a safepoint check, generate full interpreter entry.
- Address sync_state(G3_scratch, SafepointSynchronize::address_of_state());
+ AddressLiteral sync_state(SafepointSynchronize::address_of_state());
__ load_contents(sync_state, G3_scratch);
__ cmp(G3_scratch, SafepointSynchronize::_not_synchronized);
__ br(Assembler::notEqual, false, Assembler::pn, frame_manager_entry);
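Both fast entries guard the shortcut with the same safepoint poll before using it. A minimal sketch of that check, assuming hypothetical names in place of the real SafepointSynchronize interface:

```cpp
// Purely illustrative: the fast entries fall back to the full interpreter
// entry whenever a safepoint is pending. The state values and names here are
// hypothetical stand-ins, not the real SafepointSynchronize interface.
enum SafepointState { not_synchronized = 0, synchronizing, fully_synchronized };

static bool must_use_full_entry(volatile const int* safepoint_state_addr) {
  // load_contents(sync_state, G3_scratch); cmp(..., _not_synchronized); br(notEqual, ..., slow_path)
  return *safepoint_state_addr != not_synchronized;
}
```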
@@ -471,7 +470,7 @@ address InterpreterGenerator::generate_accessor_entry(void) {
if ( UseFastAccessorMethods) {
// Check if we need to reach a safepoint and generate full interpreter
// frame if so.
- Address sync_state(G3_scratch, SafepointSynchronize::address_of_state());
+ AddressLiteral sync_state(SafepointSynchronize::address_of_state());
__ load_contents(sync_state, G3_scratch);
__ cmp(G3_scratch, SafepointSynchronize::_not_synchronized);
__ br(Assembler::notEqual, false, Assembler::pn, slow_path);
@@ -486,8 +485,8 @@ address InterpreterGenerator::generate_accessor_entry(void) {

// read first instruction word and extract bytecode @ 1 and index @ 2
// get first 4 bytes of the bytecodes (big endian!)
- __ ld_ptr(Address(G5_method, 0, in_bytes(Method::const_offset())), G1_scratch);
- __ ld(Address(G1_scratch, 0, in_bytes(ConstMethod::codes_offset())), G1_scratch);
+ __ ld_ptr(Address(G5_method, in_bytes(Method::const_offset())), G1_scratch);
+ __ ld(Address(G1_scratch, in_bytes(ConstMethod::codes_offset())), G1_scratch);

// move index @ 2 far left then to the right most two bytes.
__ sll(G1_scratch, 2*BitsPerByte, G1_scratch);
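The index extraction described by the comments above (the matching right shift sits outside this hunk) can be pictured in plain C++; the names and the example word are illustrative only:

```cpp
#include <cassert>
#include <cstdint>

// Purely illustrative: the first four bytecode bytes are loaded as one
// big-endian word (accessor bytecode at byte 1, two-byte field index at
// bytes 2..3); shifting left then right by 16 bits leaves just the index.
static uint32_t extract_field_index(uint32_t first_code_word) {
  uint32_t w = first_code_word << 16;  // __ sll(G1_scratch, 2*BitsPerByte, G1_scratch)
  return w >> 16;                      // the matching logical right shift (outside this hunk)
}

int main() {
  // 0x2A = aload_0, 0xB4 = getfield, index 0x0005 -- a typical accessor method
  assert(extract_field_index(0x2AB40005u) == 0x0005u);
  return 0;
}
```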
@@ -590,15 +589,15 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {
const Register Gtmp1 = G3_scratch ;
const Register Gtmp2 = G1_scratch;
const Register RconstMethod = Gtmp1;
- const Address constMethod(G5_method, 0, in_bytes(Method::const_offset()));
- const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
+ const Address constMethod(G5_method, in_bytes(Method::const_offset()));
+ const Address size_of_parameters(RconstMethod, in_bytes(ConstMethod::size_of_parameters_offset()));

bool inc_counter = UseCompiler || CountCompiledCalls;

// make sure registers are different!
assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);

- const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
+ const Address access_flags (G5_method, in_bytes(Method::access_flags_offset()));

Label Lentry;
__ bind(Lentry);
@@ -643,7 +642,7 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {
// At this point Lstate points to new interpreter state
//

- const Address do_not_unlock_if_synchronized(G2_thread, 0,
+ const Address do_not_unlock_if_synchronized(G2_thread,
in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
// Since at this point in the method invocation the exception handler
// would try to exit the monitor of synchronized methods which hasn't
@@ -717,17 +716,17 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {

{ Label L;
__ ld_ptr(STATE(_method), G5_method);
- __ ld_ptr(Address(G5_method, 0, in_bytes(Method::signature_handler_offset())), G3_scratch);
+ __ ld_ptr(Address(G5_method, in_bytes(Method::signature_handler_offset())), G3_scratch);
__ tst(G3_scratch);
__ brx(Assembler::notZero, false, Assembler::pt, L);
__ delayed()->nop();
__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), G5_method, false);
__ ld_ptr(STATE(_method), G5_method);

- Address exception_addr(G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
+ Address exception_addr(G2_thread, in_bytes(Thread::pending_exception_offset()));
__ ld_ptr(exception_addr, G3_scratch);
__ br_notnull_short(G3_scratch, Assembler::pn, pending_exception_present);
- __ ld_ptr(Address(G5_method, 0, in_bytes(Method::signature_handler_offset())), G3_scratch);
+ __ ld_ptr(Address(G5_method, in_bytes(Method::signature_handler_offset())), G3_scratch);
__ bind(L);
}

@@ -771,13 +770,13 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {
__ br( Assembler::zero, false, Assembler::pt, not_static);
__ delayed()->
// get native function entry point(O0 is a good temp until the very end)
- ld_ptr(Address(G5_method, 0, in_bytes(Method::native_function_offset())), O0);
+ ld_ptr(Address(G5_method, in_bytes(Method::native_function_offset())), O0);
// for static methods insert the mirror argument
const int mirror_offset = in_bytes(Klass::java_mirror_offset());

- __ ld_ptr(Address(G5_method, 0, in_bytes(Method:: const_offset())), O1);
- __ ld_ptr(Address(O1, 0, in_bytes(ConstMethod::constants_offset())), O1);
- __ ld_ptr(Address(O1, 0, ConstantPool::pool_holder_offset_in_bytes()), O1);
+ __ ld_ptr(Address(G5_method, in_bytes(Method:: const_offset())), O1);
+ __ ld_ptr(Address(O1, in_bytes(ConstMethod::constants_offset())), O1);
+ __ ld_ptr(Address(O1, ConstantPool::pool_holder_offset_in_bytes()), O1);
__ ld_ptr(O1, mirror_offset, O1);
// where the mirror handle body is allocated:
#ifdef ASSERT
@@ -831,18 +830,17 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {
// flush the windows now. We don't care about the current (protection) frame
// only the outer frames

- __ flush_windows();
+ __ flushw();

// mark windows as flushed
Address flags(G2_thread,
- 0,
in_bytes(JavaThread::frame_anchor_offset()) + in_bytes(JavaFrameAnchor::flags_offset()));
__ set(JavaFrameAnchor::flushed, G3_scratch);
__ st(G3_scratch, flags);

// Transition from _thread_in_Java to _thread_in_native. We are already safepoint ready.

- Address thread_state(G2_thread, 0, in_bytes(JavaThread::thread_state_offset()));
+ Address thread_state(G2_thread, in_bytes(JavaThread::thread_state_offset()));
#ifdef ASSERT
{ Label L;
__ ld(thread_state, G3_scratch);
@@ -867,7 +865,7 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {
// Block, if necessary, before resuming in _thread_in_Java state.
// In order for GC to work, don't clear the last_Java_sp until after blocking.
{ Label no_block;
- Address sync_state(G3_scratch, SafepointSynchronize::address_of_state());
+ AddressLiteral sync_state(SafepointSynchronize::address_of_state());

// Switch thread to "native transition" state before reading the synchronization state.
// This additional state is necessary because reading and testing the synchronization
@@ -890,7 +888,7 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {


Label L;
- Address suspend_state(G2_thread, 0, in_bytes(JavaThread::suspend_flags_offset()));
+ Address suspend_state(G2_thread, in_bytes(JavaThread::suspend_flags_offset()));
__ br(Assembler::notEqual, false, Assembler::pn, L);
__ delayed()->
ld(suspend_state, G3_scratch);
@@ -965,7 +963,7 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {

// handle exceptions (exception handling will handle unlocking!)
{ Label L;
- Address exception_addr (G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
+ Address exception_addr (G2_thread, in_bytes(Thread::pending_exception_offset()));

__ ld_ptr(exception_addr, Gtemp);
__ tst(Gtemp);
@@ -1055,8 +1053,8 @@ void CppInterpreterGenerator::generate_compute_interpreter_state(const Register
assert_different_registers(state, prev_state);
assert_different_registers(prev_state, G3_scratch);
const Register Gtmp = G3_scratch;
- const Address constMethod (G5_method, 0, in_bytes(Method::const_offset()));
- const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
+ const Address constMethod (G5_method, in_bytes(Method::const_offset()));
+ const Address access_flags (G5_method, in_bytes(Method::access_flags_offset()));

// slop factor is two extra slots on the expression stack so that
// we always have room to store a result when returning from a call without parameters
@@ -1075,7 +1073,7 @@ void CppInterpreterGenerator::generate_compute_interpreter_state(const Register

if (native) {
const Register RconstMethod = Gtmp;
- const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
+ const Address size_of_parameters(RconstMethod, in_bytes(ConstMethod::size_of_parameters_offset()));
__ ld_ptr(constMethod, RconstMethod);
__ lduh( size_of_parameters, Gtmp );
__ calc_mem_param_words(Gtmp, Gtmp); // space for native call parameters passed on the stack in words
@@ -1246,8 +1244,8 @@ void CppInterpreterGenerator::generate_compute_interpreter_state(const Register
if (init_value != noreg) {
Label clear_loop;
const Register RconstMethod = O1;
- const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
- const Address size_of_locals (RconstMethod, 0, in_bytes(ConstMethod::size_of_locals_offset()));
+ const Address size_of_parameters(RconstMethod, in_bytes(ConstMethod::size_of_parameters_offset()));
+ const Address size_of_locals (RconstMethod, in_bytes(ConstMethod::size_of_locals_offset()));

// NOTE: If you change the frame layout, this code will need to
// be updated!
@@ -1496,11 +1494,11 @@ void CppInterpreterGenerator::adjust_callers_stack(Register args) {
//
// assert_different_registers(state, prev_state);
const Register Gtmp = G3_scratch;
- const RconstMethod = G3_scratch;
+ const Register RconstMethod = G3_scratch;
const Register tmp = O2;
- const Address constMethod(G5_method, 0, in_bytes(Method::const_offset()));
- const Address size_of_parameters(RconstMethod, 0, in_bytes(ConstMethod::size_of_parameters_offset()));
- const Address size_of_locals (RconstMethod, 0, in_bytes(ConstMethod::size_of_locals_offset()));
+ const Address constMethod(G5_method, in_bytes(Method::const_offset()));
+ const Address size_of_parameters(RconstMethod, in_bytes(ConstMethod::size_of_parameters_offset()));
+ const Address size_of_locals (RconstMethod, in_bytes(ConstMethod::size_of_locals_offset()));

__ ld_ptr(constMethod, RconstMethod);
__ lduh(size_of_parameters, tmp);
@@ -1555,8 +1553,8 @@ address InterpreterGenerator::generate_normal_entry(bool synchronized) {
const Register Gtmp1 = G3_scratch;
// const Register Lmirror = L1; // native mirror (native calls only)

- const Address constMethod (G5_method, 0, in_bytes(Method::const_offset()));
- const Address access_flags (G5_method, 0, in_bytes(Method::access_flags_offset()));
+ const Address constMethod (G5_method, in_bytes(Method::const_offset()));
+ const Address access_flags (G5_method, in_bytes(Method::access_flags_offset()));

address entry_point = __ pc();
__ mov(G0, prevState); // no current activation
@@ -1709,7 +1707,7 @@ address InterpreterGenerator::generate_normal_entry(bool synchronized) {

// We want exception in the thread no matter what we ultimately decide about frame type.

- Address exception_addr (G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
+ Address exception_addr (G2_thread, in_bytes(Thread::pending_exception_offset()));
__ verify_thread();
__ st_ptr(O0, exception_addr);

@@ -827,6 +827,7 @@ void frame::describe_pd(FrameValues& values, int frame_no) {
}

if (is_interpreted_frame()) {
+ #ifndef CC_INTERP
DESCRIBE_FP_OFFSET(interpreter_frame_d_scratch_fp);
DESCRIBE_FP_OFFSET(interpreter_frame_l_scratch_fp);
DESCRIBE_FP_OFFSET(interpreter_frame_padding);
@@ -837,6 +838,7 @@ void frame::describe_pd(FrameValues& values, int frame_no) {
if ((esp >= sp()) && (esp < fp())) {
values.describe(-1, esp, "*Lesp");
}
+ #endif
}

if (!is_compiled_frame()) {
@@ -2497,6 +2497,24 @@ void InterpreterMacroAssembler::verify_oop_or_return_address(Register reg, Regis
void InterpreterMacroAssembler::verify_FPU(int stack_depth, TosState state) {
if (state == ftos || state == dtos) MacroAssembler::verify_FPU(stack_depth);
}


+ // Jump if ((*counter_addr += increment) & mask) satisfies the condition.
+ void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
+ int increment, int mask,
+ Register scratch1, Register scratch2,
+ Condition cond, Label *where) {
+ ld(counter_addr, scratch1);
+ add(scratch1, increment, scratch1);
+ if (is_simm13(mask)) {
+ andcc(scratch1, mask, G0);
+ } else {
+ set(mask, scratch2);
+ andcc(scratch1, scratch2, G0);
+ }
+ br(cond, false, Assembler::pn, *where);
+ delayed()->st(scratch1, counter_addr);
+ }
#endif /* CC_INTERP */

// Inline assembly for:
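The SPARC increment_mask_and_jump added above (its old copy is removed in the next hunk) bumps a counter and conditionally branches on the masked result. A hedged plain-C++ sketch of that semantics, with illustrative names instead of the assembler types:

```cpp
// Purely illustrative semantics of increment_mask_and_jump: add `increment`
// to the counter, store the new value back, and report whether the masked
// result is zero, which is when the generated code takes the branch for the
// commonly used Assembler::zero condition. Not HotSpot API; names are made up.
static bool increment_mask_and_test(int* counter_addr, int increment, int mask) {
  int value = *counter_addr + increment;  // ld(counter_addr, scratch1); add(...)
  *counter_addr = value;                  // st(scratch1, counter_addr) in the delay slot
  return (value & mask) == 0;             // andcc(...) + br(cond, ...)
}
```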
@@ -2646,20 +2664,3 @@ void InterpreterMacroAssembler::restore_return_value( TosState state, bool is_na
}
#endif // CC_INTERP
}

- // Jump if ((*counter_addr += increment) & mask) satisfies the condition.
- void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
- int increment, int mask,
- Register scratch1, Register scratch2,
- Condition cond, Label *where) {
- ld(counter_addr, scratch1);
- add(scratch1, increment, scratch1);
- if (is_simm13(mask)) {
- andcc(scratch1, mask, G0);
- } else {
- set(mask, scratch2);
- andcc(scratch1, scratch2, G0);
- }
- br(cond, false, Assembler::pn, *where);
- delayed()->st(scratch1, counter_addr);
- }
@@ -23,7 +23,8 @@
*/

#include "precompiled.hpp"
#include "asm/macroAssembler.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeCache.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_sparc.hpp"
#include "oops/oop.inline.hpp"
@@ -250,7 +250,7 @@ inline jint BytecodeInterpreter::VMintSub(jint op1, jint op2) {
return op1 - op2;
}

- inline jint BytecodeInterpreter::VMintUshr(jint op1, jint op2) {
+ inline juint BytecodeInterpreter::VMintUshr(jint op1, jint op2) {
return ((juint) op1) >> (op2 & 0x1f);
}

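VMintUshr implements the Java iushr operation, which is a logical shift. A small self-contained illustration of why the detour through an unsigned type matters; purely illustrative, not taken from the VM sources:

```cpp
#include <cassert>
#include <cstdint>

// iushr is a logical shift: the vacated high bits must be filled with zeros
// even for negative inputs, which is exactly what the cast through an
// unsigned type buys. Purely illustrative.
static uint32_t vm_int_ushr(int32_t op1, int32_t op2) {
  return (uint32_t)op1 >> (op2 & 0x1f);  // mirrors ((juint) op1) >> (op2 & 0x1f)
}

int main() {
  assert(vm_int_ushr(-8, 1) == 0x7FFFFFFCu);  // an arithmetic shift would give 0xFFFFFFFC
  return 0;
}
```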
@@ -574,7 +574,7 @@ void InterpreterGenerator::generate_counter_incr(Label* overflow, Label* profile
MethodCounters::invocation_counter_offset() +
InvocationCounter::counter_offset());
const Address backedge_counter (rax,
- MethodCounter::backedge_counter_offset() +
+ MethodCounters::backedge_counter_offset() +
InvocationCounter::counter_offset());

__ get_method_counters(rbx, rax, done);
@@ -982,16 +982,18 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {
// to save/restore.
address entry_point = __ pc();

- const Address constMethod (rbx, Method::const_offset());
const Address access_flags (rbx, Method::access_flags_offset());
- const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());

// rsi/r13 == state/locals rdi == prevstate
const Register locals = rdi;

// get parameter size (always needed)
- __ movptr(rcx, constMethod);
- __ load_unsigned_short(rcx, size_of_parameters);
+ {
+ const Address constMethod (rbx, Method::const_offset());
+ const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());
+ __ movptr(rcx, constMethod);
+ __ load_unsigned_short(rcx, size_of_parameters);
+ }

// rbx: Method*
// rcx: size of parameters
@@ -1111,14 +1113,16 @@ address InterpreterGenerator::generate_native_entry(bool synchronized) {
const Register method = rbx;
const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rdi);
const Register t = InterpreterRuntime::SignatureHandlerGenerator::temp(); // rcx|rscratch1
- const Address constMethod (method, Method::const_offset());
- const Address size_of_parameters(t, ConstMethod::size_of_parameters_offset());

- // allocate space for parameters
+ // allocate space for parameters
__ movptr(method, STATE(_method));
__ verify_method_ptr(method);
- __ movptr(t, constMethod);
- __ load_unsigned_short(t, size_of_parameters);
+ {
+ const Address constMethod (method, Method::const_offset());
+ const Address size_of_parameters(t, ConstMethod::size_of_parameters_offset());
+ __ movptr(t, constMethod);
+ __ load_unsigned_short(t, size_of_parameters);
+ }
__ shll(t, 2);
#ifdef _LP64
__ subptr(rsp, t);
@@ -2221,7 +2225,6 @@ address AbstractInterpreterGenerator::generate_method_entry(AbstractInterpreter:
case Interpreter::empty : entry_point = ((InterpreterGenerator*)this)->generate_empty_entry(); break;
case Interpreter::accessor : entry_point = ((InterpreterGenerator*)this)->generate_accessor_entry(); break;
case Interpreter::abstract : entry_point = ((InterpreterGenerator*)this)->generate_abstract_entry(); break;
- case Interpreter::method_handle : entry_point = ((InterpreterGenerator*)this)->generate_method_handle_entry(); break;

case Interpreter::java_lang_math_sin : // fall thru
case Interpreter::java_lang_math_cos : // fall thru
@@ -2229,7 +2232,10 @@ address AbstractInterpreterGenerator::generate_method_entry(AbstractInterpreter:
case Interpreter::java_lang_math_abs : // fall thru
case Interpreter::java_lang_math_log : // fall thru
case Interpreter::java_lang_math_log10 : // fall thru
- case Interpreter::java_lang_math_sqrt : entry_point = ((InterpreterGenerator*)this)->generate_math_entry(kind); break;
+ case Interpreter::java_lang_math_sqrt : // fall thru
+ case Interpreter::java_lang_math_pow : // fall thru
+ case Interpreter::java_lang_math_exp : // fall thru
+ entry_point = ((InterpreterGenerator*)this)->generate_math_entry(kind); break;
case Interpreter::java_lang_ref_reference_get
: entry_point = ((InterpreterGenerator*)this)->generate_Reference_get_entry(); break;
default : ShouldNotReachHere(); break;
@@ -2451,4 +2457,22 @@ int AbstractInterpreter::layout_activation(Method* method,
return frame_size/BytesPerWord;
}

+ bool AbstractInterpreter::can_be_compiled(methodHandle m) {
+ switch (method_kind(m)) {
+ case Interpreter::java_lang_math_sin : // fall thru
+ case Interpreter::java_lang_math_cos : // fall thru
+ case Interpreter::java_lang_math_tan : // fall thru
+ case Interpreter::java_lang_math_abs : // fall thru
+ case Interpreter::java_lang_math_log : // fall thru
+ case Interpreter::java_lang_math_log10 : // fall thru
+ case Interpreter::java_lang_math_sqrt : // fall thru
+ case Interpreter::java_lang_math_pow : // fall thru
+ case Interpreter::java_lang_math_exp :
+ return false;
+ default:
+ return true;
+ }
+ }


#endif // CC_INTERP (all)
@@ -687,6 +687,7 @@ intptr_t* frame::interpreter_frame_tos_at(jint offset) const {

void frame::describe_pd(FrameValues& values, int frame_no) {
if (is_interpreted_frame()) {
+ #ifndef CC_INTERP
DESCRIBE_FP_OFFSET(interpreter_frame_sender_sp);
DESCRIBE_FP_OFFSET(interpreter_frame_last_sp);
DESCRIBE_FP_OFFSET(interpreter_frame_method);
@@ -695,6 +696,7 @@ void frame::describe_pd(FrameValues& values, int frame_no) {
DESCRIBE_FP_OFFSET(interpreter_frame_locals);
DESCRIBE_FP_OFFSET(interpreter_frame_bcx);
DESCRIBE_FP_OFFSET(interpreter_frame_initial_sp);
+ #endif
}
}
#endif
@@ -266,20 +266,6 @@ void InterpreterMacroAssembler::get_cache_entry_pointer_at_bcp(Register cache, R
addptr(cache, tmp); // construct pointer to cache entry
}

- void InterpreterMacroAssembler::get_method_counters(Register method,
- Register mcs, Label& skip) {
- Label has_counters;
- movptr(mcs, Address(method, Method::method_counters_offset()));
- testptr(mcs, mcs);
- jcc(Assembler::notZero, has_counters);
- call_VM(noreg, CAST_FROM_FN_PTR(address,
- InterpreterRuntime::build_method_counters), method);
- movptr(mcs, Address(method,Method::method_counters_offset()));
- testptr(mcs, mcs);
- jcc(Assembler::zero, skip); // No MethodCounters allocated, OutOfMemory
- bind(has_counters);
- }

// Load object from cpool->resolved_references(index)
void InterpreterMacroAssembler::load_resolved_reference_at_index(
Register result, Register index) {
@@ -678,6 +664,20 @@ void InterpreterMacroAssembler::remove_activation(TosState state, Register ret_a

#endif /* !CC_INTERP */

+ void InterpreterMacroAssembler::get_method_counters(Register method,
+ Register mcs, Label& skip) {
+ Label has_counters;
+ movptr(mcs, Address(method, Method::method_counters_offset()));
+ testptr(mcs, mcs);
+ jcc(Assembler::notZero, has_counters);
+ call_VM(noreg, CAST_FROM_FN_PTR(address,
+ InterpreterRuntime::build_method_counters), method);
+ movptr(mcs, Address(method,Method::method_counters_offset()));
+ testptr(mcs, mcs);
+ jcc(Assembler::zero, skip); // No MethodCounters allocated, OutOfMemory
+ bind(has_counters);
+ }


// Lock object
//
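get_method_counters, added here after the !CC_INTERP guard so it is available to the CPP interpreter as well, implements lazy allocation with a graceful out-of-memory exit. A hedged, self-contained C++ sketch of that control flow (the types and helper are stand-ins, not the real HotSpot ones):

```cpp
// Illustrative stand-ins for Method / MethodCounters; not the HotSpot types.
struct Counters {};
struct MethodStub { Counters* counters = nullptr; };

// Stand-in for the call_VM(..., build_method_counters, ...) upcall; the real
// one allocates the counters lazily and can fail under memory pressure.
static void build_method_counters(MethodStub* m) {
  static Counters storage;   // sketch: pretend the allocation always succeeds
  m->counters = &storage;
}

// Mirrors the generated control flow: reuse existing counters, otherwise ask
// the runtime to build them, and take the `skip` exit if that still failed.
static Counters* get_method_counters(MethodStub* m, bool* skip) {
  if (m->counters != nullptr) return m->counters;  // jcc(notZero, has_counters)
  build_method_counters(m);                        // call_VM(...)
  if (m->counters == nullptr) *skip = true;        // jcc(zero, skip): OutOfMemory
  return m->counters;                              // bind(has_counters)
}
```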
@@ -1359,6 +1359,19 @@ void InterpreterMacroAssembler::verify_FPU(int stack_depth, TosState state) {
if (state == ftos || state == dtos) MacroAssembler::verify_FPU(stack_depth);
}

+ // Jump if ((*counter_addr += increment) & mask) satisfies the condition.
+ void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
+ int increment, int mask,
+ Register scratch, bool preloaded,
+ Condition cond, Label* where) {
+ if (!preloaded) {
+ movl(scratch, counter_addr);
+ }
+ incrementl(scratch, increment);
+ movl(counter_addr, scratch);
+ andl(scratch, mask);
+ jcc(cond, *where);
+ }
#endif /* CC_INTERP */

@@ -1430,17 +1443,3 @@ void InterpreterMacroAssembler::notify_method_exit(
NOT_CC_INTERP(pop(state));
}
}

- // Jump if ((*counter_addr += increment) & mask) satisfies the condition.
- void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
- int increment, int mask,
- Register scratch, bool preloaded,
- Condition cond, Label* where) {
- if (!preloaded) {
- movl(scratch, counter_addr);
- }
- incrementl(scratch, increment);
- movl(counter_addr, scratch);
- andl(scratch, mask);
- jcc(cond, *where);
- }
@@ -77,7 +77,6 @@
void get_cache_and_index_and_bytecode_at_bcp(Register cache, Register index, Register bytecode, int byte_no, int bcp_offset, size_t index_size = sizeof(u2));
void get_cache_entry_pointer_at_bcp(Register cache, Register tmp, int bcp_offset, size_t index_size = sizeof(u2));
void get_cache_index_at_bcp(Register index, int bcp_offset, size_t index_size = sizeof(u2));
- void get_method_counters(Register method, Register mcs, Label& skip);

// load cpool->resolved_references(index);
void load_resolved_reference_at_index(Register result, Register index);
@@ -156,6 +155,7 @@
bool install_monitor_exception = true,
bool notify_jvmdi = true);
#endif /* !CC_INTERP */
+ void get_method_counters(Register method, Register mcs, Label& skip);

// Debugging
void verify_oop(Register reg, TosState state = atos); // only if +VerifyOops && state == atos
@@ -271,20 +271,6 @@ void InterpreterMacroAssembler::get_cache_entry_pointer_at_bcp(Register cache,
addptr(cache, tmp); // construct pointer to cache entry
}

- void InterpreterMacroAssembler::get_method_counters(Register method,
- Register mcs, Label& skip) {
- Label has_counters;
- movptr(mcs, Address(method, Method::method_counters_offset()));
- testptr(mcs, mcs);
- jcc(Assembler::notZero, has_counters);
- call_VM(noreg, CAST_FROM_FN_PTR(address,
- InterpreterRuntime::build_method_counters), method);
- movptr(mcs, Address(method,Method::method_counters_offset()));
- testptr(mcs, mcs);
- jcc(Assembler::zero, skip); // No MethodCounters allocated, OutOfMemory
- bind(has_counters);
- }

// Load object from cpool->resolved_references(index)
void InterpreterMacroAssembler::load_resolved_reference_at_index(
Register result, Register index) {
@@ -676,6 +662,21 @@ void InterpreterMacroAssembler::remove_activation(

#endif // C_INTERP

+ void InterpreterMacroAssembler::get_method_counters(Register method,
+ Register mcs, Label& skip) {
+ Label has_counters;
+ movptr(mcs, Address(method, Method::method_counters_offset()));
+ testptr(mcs, mcs);
+ jcc(Assembler::notZero, has_counters);
+ call_VM(noreg, CAST_FROM_FN_PTR(address,
+ InterpreterRuntime::build_method_counters), method);
+ movptr(mcs, Address(method,Method::method_counters_offset()));
+ testptr(mcs, mcs);
+ jcc(Assembler::zero, skip); // No MethodCounters allocated, OutOfMemory
+ bind(has_counters);
+ }


// Lock object
//
// Args:
@@ -1423,6 +1424,20 @@ void InterpreterMacroAssembler::verify_oop(Register reg, TosState state) {

void InterpreterMacroAssembler::verify_FPU(int stack_depth, TosState state) {
}

+ // Jump if ((*counter_addr += increment) & mask) satisfies the condition.
+ void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
+ int increment, int mask,
+ Register scratch, bool preloaded,
+ Condition cond, Label* where) {
+ if (!preloaded) {
+ movl(scratch, counter_addr);
+ }
+ incrementl(scratch, increment);
+ movl(counter_addr, scratch);
+ andl(scratch, mask);
+ jcc(cond, *where);
+ }
#endif // !CC_INTERP

@@ -1491,16 +1506,3 @@ void InterpreterMacroAssembler::notify_method_exit(
}
}

- // Jump if ((*counter_addr += increment) & mask) satisfies the condition.
- void InterpreterMacroAssembler::increment_mask_and_jump(Address counter_addr,
- int increment, int mask,
- Register scratch, bool preloaded,
- Condition cond, Label* where) {
- if (!preloaded) {
- movl(scratch, counter_addr);
- }
- incrementl(scratch, increment);
- movl(counter_addr, scratch);
- andl(scratch, mask);
- jcc(cond, *where);
- }
@@ -99,7 +99,6 @@
void get_cache_and_index_and_bytecode_at_bcp(Register cache, Register index, Register bytecode, int byte_no, int bcp_offset, size_t index_size = sizeof(u2));
void get_cache_entry_pointer_at_bcp(Register cache, Register tmp, int bcp_offset, size_t index_size = sizeof(u2));
void get_cache_index_at_bcp(Register index, int bcp_offset, size_t index_size = sizeof(u2));
- void get_method_counters(Register method, Register mcs, Label& skip);

// load cpool->resolved_references(index);
void load_resolved_reference_at_index(Register result, Register index);
@@ -172,6 +171,7 @@
bool install_monitor_exception = true,
bool notify_jvmdi = true);
#endif // CC_INTERP
+ void get_method_counters(Register method, Register mcs, Label& skip);

// Object locking
void lock_object (Register lock_reg);
@@ -229,10 +229,12 @@ address InterpreterGenerator::generate_abstract_entry(void) {

// abstract method entry

+ #ifndef CC_INTERP
// pop return address, reset last_sp to NULL
__ empty_expression_stack();
__ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
__ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
+ #endif

// throw exception
__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
@@ -310,10 +310,12 @@ address InterpreterGenerator::generate_abstract_entry(void) {

// abstract method entry

+ #ifndef CC_INTERP
// pop return address, reset last_sp to NULL
__ empty_expression_stack();
__ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
__ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
+ #endif

// throw exception
__ call_VM(noreg, CAST_FROM_FN_PTR(address,
@@ -3475,7 +3475,7 @@ BytecodeInterpreter::print() {
tty->print_cr("&native_fresult: " INTPTR_FORMAT, (uintptr_t) &this->_native_fresult);
tty->print_cr("native_lresult: " INTPTR_FORMAT, (uintptr_t) this->_native_lresult);
#endif
- #if !defined(ZERO)
+ #if !defined(ZERO) && defined(PPC)
tty->print_cr("last_Java_fp: " INTPTR_FORMAT, (uintptr_t) this->_last_Java_fp);
#endif // !ZERO
tty->print_cr("self_link: " INTPTR_FORMAT, (uintptr_t) this->_self_link);