8293769: RISC-V: Add a second temporary register for BarrierSetAssembler::load_at
Reviewed-by: fjiang, shade
parent d191e47516
commit 7376c55219
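For orientation, the interface change in the diff below can be summarized as: BarrierSetAssembler::load_at (and the helpers layered on it, such as access_load_at, load_heap_oop, resolve_oop_handle and resolve_jobject) no longer takes a trailing thread/tmp_thread register and instead takes a second general-purpose temporary, tmp2. The following minimal, self-contained C++ sketch illustrates only that calling-convention change; Register, Address, DecoratorSet and BasicType are mocked stand-ins here, not the HotSpot types.

#include <cstdio>

// Mocked stand-ins for illustration only (not HotSpot's types).
struct Register { const char* name; };
struct Address  { Register base; int offset; };
using DecoratorSet = unsigned;
enum BasicType { T_OBJECT, T_INT };

// Old shape (before this change):
//   load_at(..., Register dst, Address src, Register tmp1, Register tmp_thread);
// New shape (after this change): two interchangeable scratch registers.
void load_at(DecoratorSet /*decorators*/, BasicType /*type*/,
             Register dst, Address src, Register tmp1, Register tmp2) {
  // A real barrier-set assembler would emit the load plus any GC barrier here,
  // and is now free to clobber both tmp1 and tmp2 as scratch registers.
  std::printf("load %s <- [%s+%d] using temps %s, %s\n",
              dst.name, src.base.name, src.offset, tmp1.name, tmp2.name);
}

int main() {
  Register x10{"x10"}, t0{"t0"}, t1{"t1"};
  // Loosely mirrors the jfr_epilogue call site in the diff: load_at(..., t0, t1).
  load_at(0, T_OBJECT, x10, Address{x10, 0}, t0, t1);
  return 0;
}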
@@ -100,7 +100,8 @@ void G1BarrierSetAssembler::g1_write_barrier_pre(MacroAssembler* masm,
Register obj,
Register pre_val,
Register thread,
Register tmp,
Register tmp1,
Register tmp2,
bool tosca_live,
bool expand_call) {
// If expand_call is true then we expand the call_VM_leaf macro
@@ -112,8 +113,8 @@ void G1BarrierSetAssembler::g1_write_barrier_pre(MacroAssembler* masm,
Label done;
Label runtime;

assert_different_registers(obj, pre_val, tmp, t0);
assert(pre_val != noreg && tmp != noreg, "expecting a register");
assert_different_registers(obj, pre_val, tmp1, tmp2);
assert(pre_val != noreg && tmp1 != noreg && tmp2 != noreg, "expecting a register");

Address in_progress(thread, in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset()));
Address index(thread, in_bytes(G1ThreadLocalData::satb_mark_queue_index_offset()));
@@ -121,12 +122,12 @@ void G1BarrierSetAssembler::g1_write_barrier_pre(MacroAssembler* masm,

// Is marking active?
if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) { // 4-byte width
__ lwu(tmp, in_progress);
__ lwu(tmp1, in_progress);
} else {
assert(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
__ lbu(tmp, in_progress);
__ lbu(tmp1, in_progress);
}
__ beqz(tmp, done);
__ beqz(tmp1, done);

// Do we need to load the previous value?
if (obj != noreg) {
@@ -140,17 +141,17 @@ void G1BarrierSetAssembler::g1_write_barrier_pre(MacroAssembler* masm,
// Is index == 0?
// (The index field is typed as size_t.)

__ ld(tmp, index); // tmp := *index_adr
__ beqz(tmp, runtime); // tmp == 0?
// If yes, goto runtime
__ ld(tmp1, index); // tmp := *index_adr
__ beqz(tmp1, runtime); // tmp == 0?
// If yes, goto runtime

__ sub(tmp, tmp, wordSize); // tmp := tmp - wordSize
__ sd(tmp, index); // *index_adr := tmp
__ ld(t0, buffer);
__ add(tmp, tmp, t0); // tmp := tmp + *buffer_adr
__ sub(tmp1, tmp1, wordSize); // tmp := tmp - wordSize
__ sd(tmp1, index); // *index_adr := tmp
__ ld(tmp2, buffer);
__ add(tmp1, tmp1, tmp2); // tmp := tmp + *buffer_adr

// Record the previous value
__ sd(pre_val, Address(tmp, 0));
__ sd(pre_val, Address(tmp1, 0));
__ j(done);

__ bind(runtime);
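The renames above leave the SATB pre-barrier fast path itself unchanged. As a reading aid, here is a hedged, self-contained C++ sketch of what that instruction sequence computes; SatbQueueSketch and satb_try_enqueue are simplified stand-ins invented for this sketch, not the HotSpot SATBMarkQueue API. The only functional difference introduced by this patch is that the buffer base is now loaded into the caller-supplied tmp2 instead of clobbering t0.

#include <cstddef>
#include <cstdint>

// Simplified stand-in for the per-thread SATB queue state referenced above.
struct SatbQueueSketch {
  size_t     index;   // byte offset of the next free slot, counts down to 0
  uintptr_t* buffer;  // base of the per-thread buffer
};

// Returns true if pre_val was recorded in the buffer (the "done" path),
// false if the caller must take the runtime slow path (the "runtime" label).
static bool satb_try_enqueue(SatbQueueSketch* q, uintptr_t pre_val) {
  size_t tmp1 = q->index;                       // __ ld(tmp1, index)
  if (tmp1 == 0) {                              // __ beqz(tmp1, runtime)
    return false;
  }
  tmp1 -= sizeof(uintptr_t);                    // __ sub(tmp1, tmp1, wordSize)
  q->index = tmp1;                              // __ sd(tmp1, index)
  uintptr_t* tmp2 = q->buffer;                  // __ ld(tmp2, buffer)
  *(uintptr_t*)((char*)tmp2 + tmp1) = pre_val;  // __ add(tmp1, tmp1, tmp2); __ sd(pre_val, Address(tmp1, 0))
  return true;                                  // __ j(done)
}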
@@ -174,12 +175,11 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm,
Register store_addr,
Register new_val,
Register thread,
Register tmp,
Register tmp1,
Register tmp2) {
assert(thread == xthread, "must be");
assert_different_registers(store_addr, new_val, thread, tmp, tmp2,
t0);
assert(store_addr != noreg && new_val != noreg && tmp != noreg &&
assert_different_registers(store_addr, new_val, thread, tmp1, tmp2, t0);
assert(store_addr != noreg && new_val != noreg && tmp1 != noreg &&
tmp2 != noreg, "expecting a register");

Address queue_index(thread, in_bytes(G1ThreadLocalData::dirty_card_queue_index_offset()));
@@ -194,9 +194,9 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm,

// Does store cross heap regions?

__ xorr(tmp, store_addr, new_val);
__ srli(tmp, tmp, HeapRegion::LogOfHRGrainBytes);
__ beqz(tmp, done);
__ xorr(tmp1, store_addr, new_val);
__ srli(tmp1, tmp1, HeapRegion::LogOfHRGrainBytes);
__ beqz(tmp1, done);

// crosses regions, storing NULL?

@@ -205,7 +205,7 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm,
// storing region crossing non-NULL, is card already dirty?

ExternalAddress cardtable((address) ct->byte_map_base());
const Register card_addr = tmp;
const Register card_addr = tmp1;

__ srli(card_addr, store_addr, CardTable::card_shift());

@@ -249,12 +249,12 @@ void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm,
}

void G1BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Register dst, Address src, Register tmp1, Register tmp_thread) {
Register dst, Address src, Register tmp1, Register tmp2) {
bool on_oop = is_reference_type(type);
bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
bool on_phantom = (decorators & ON_PHANTOM_OOP_REF) != 0;
bool on_reference = on_weak || on_phantom;
ModRefBarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
ModRefBarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);
if (on_oop && on_reference) {
// RA is live. It must be saved around calls.
__ enter(); // barrier may call runtime
@@ -264,7 +264,8 @@ void G1BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorator
noreg /* obj */,
dst /* pre_val */,
xthread /* thread */,
tmp1 /* tmp */,
tmp1 /* tmp1 */,
tmp2 /* tmp2 */,
true /* tosca_live */,
true /* expand_call */);
__ leave();
@@ -286,7 +287,8 @@ void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet deco
tmp3 /* obj */,
tmp2 /* pre_val */,
xthread /* thread */,
tmp1 /* tmp */,
tmp1 /* tmp1 */,
t1 /* tmp2 */,
val != noreg /* tosca_live */,
false /* expand_call */);

@@ -304,7 +306,7 @@ void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet deco
tmp3 /* store_adr */,
new_val /* new_val */,
xthread /* thread */,
tmp1 /* tmp */,
tmp1 /* tmp1 */,
tmp2 /* tmp2 */);
}
}

@@ -48,7 +48,8 @@ protected:
Register obj,
Register pre_val,
Register thread,
Register tmp,
Register tmp1,
Register tmp2,
bool tosca_live,
bool expand_call);

@@ -56,7 +57,7 @@ protected:
Register store_addr,
Register new_val,
Register thread,
Register tmp,
Register tmp1,
Register tmp2);

virtual void oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
@@ -72,7 +73,7 @@ public:
#endif

void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Register dst, Address src, Register tmp1, Register tmp_thread);
Register dst, Address src, Register tmp1, Register tmp2);
};

#endif // CPU_RISCV_GC_G1_G1BARRIERSETASSEMBLER_RISCV_HPP

@@ -39,7 +39,7 @@
#define __ masm->

void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Register dst, Address src, Register tmp1, Register tmp_thread) {
Register dst, Address src, Register tmp1, Register tmp2) {
// RA is live. It must be saved around calls.

bool in_heap = (decorators & IN_HEAP) != 0;
@@ -290,15 +290,14 @@ void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) {
__ bnez(t1, method_live);

// Is it a weak but alive CLD?
__ push_reg(RegSet::of(x28, x29), sp);
__ push_reg(RegSet::of(x28), sp);

__ ld(x28, Address(t0, ClassLoaderData::holder_offset()));

// Uses x28 & x29, so we must pass new temporaries.
__ resolve_weak_handle(x28, x29);
__ resolve_weak_handle(x28, t0, t1);
__ mv(t0, x28);

__ pop_reg(RegSet::of(x28, x29), sp);
__ pop_reg(RegSet::of(x28), sp);

__ bnez(t0, method_live);

@@ -50,7 +50,7 @@ public:
virtual void arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
Register start, Register end, Register tmp, RegSet saved_regs) {}
virtual void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Register dst, Address src, Register tmp1, Register tmp_thread);
Register dst, Address src, Register tmp1, Register tmp2);
virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Address dst, Register val, Register tmp1, Register tmp2, Register tmp3);

@@ -89,7 +89,7 @@ void ShenandoahBarrierSetAssembler::shenandoah_write_barrier_pre(MacroAssembler*
bool tosca_live,
bool expand_call) {
if (ShenandoahSATBBarrier) {
satb_write_barrier_pre(masm, obj, pre_val, thread, tmp, tosca_live, expand_call);
satb_write_barrier_pre(masm, obj, pre_val, thread, tmp, t0, tosca_live, expand_call);
}
}

@@ -97,7 +97,8 @@ void ShenandoahBarrierSetAssembler::satb_write_barrier_pre(MacroAssembler* masm,
Register obj,
Register pre_val,
Register thread,
Register tmp,
Register tmp1,
Register tmp2,
bool tosca_live,
bool expand_call) {
// If expand_call is true then we expand the call_VM_leaf macro
@@ -108,8 +109,8 @@ void ShenandoahBarrierSetAssembler::satb_write_barrier_pre(MacroAssembler* masm,
Label done;
Label runtime;

assert_different_registers(obj, pre_val, tmp, t0);
assert(pre_val != noreg && tmp != noreg, "expecting a register");
assert_different_registers(obj, pre_val, tmp1, tmp2);
assert(pre_val != noreg && tmp1 != noreg && tmp2 != noreg, "expecting a register");

Address in_progress(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_active_offset()));
Address index(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
@@ -117,12 +118,12 @@ void ShenandoahBarrierSetAssembler::satb_write_barrier_pre(MacroAssembler* masm,

// Is marking active?
if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
__ lwu(tmp, in_progress);
__ lwu(tmp1, in_progress);
} else {
assert(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
__ lbu(tmp, in_progress);
__ lbu(tmp1, in_progress);
}
__ beqz(tmp, done);
__ beqz(tmp1, done);

// Do we need to load the previous value?
if (obj != noreg) {
@@ -135,16 +136,16 @@ void ShenandoahBarrierSetAssembler::satb_write_barrier_pre(MacroAssembler* masm,
// Can we store original value in the thread's buffer?
// Is index == 0?
// (The index field is typed as size_t.)
__ ld(tmp, index); // tmp := *index_adr
__ beqz(tmp, runtime); // tmp == 0? If yes, goto runtime
__ ld(tmp1, index); // tmp := *index_adr
__ beqz(tmp1, runtime); // tmp == 0? If yes, goto runtime

__ sub(tmp, tmp, wordSize); // tmp := tmp - wordSize
__ sd(tmp, index); // *index_adr := tmp
__ ld(t0, buffer);
__ add(tmp, tmp, t0); // tmp := tmp + *buffer_adr
__ sub(tmp1, tmp1, wordSize); // tmp := tmp - wordSize
__ sd(tmp1, index); // *index_adr := tmp
__ ld(tmp2, buffer);
__ add(tmp1, tmp1, tmp2); // tmp := tmp + *buffer_adr

// Record the previous value
__ sd(pre_val, Address(tmp, 0));
__ sd(pre_val, Address(tmp1, 0));
__ j(done);

__ bind(runtime);
@@ -314,7 +315,7 @@ void ShenandoahBarrierSetAssembler::iu_barrier(MacroAssembler* masm, Register ds
if (ShenandoahIUBarrier) {
__ push_call_clobbered_registers();

satb_write_barrier_pre(masm, noreg, dst, xthread, tmp, true, false);
satb_write_barrier_pre(masm, noreg, dst, xthread, tmp, t0, true, false);

__ pop_call_clobbered_registers();
}
@@ -341,10 +342,10 @@ void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm,
Register dst,
Address src,
Register tmp1,
Register tmp_thread) {
Register tmp2) {
// 1: non-reference load, no additional barrier is needed
if (!is_reference_type(type)) {
BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);
return;
}

@@ -361,7 +362,7 @@ void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm,
}
assert_different_registers(dst, src.base());

BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);

load_reference_barrier(masm, dst, src, decorators);

@@ -374,7 +375,7 @@ void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm,
__ pop_reg(saved_regs, sp);
}
} else {
BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);
}

// 3: apply keep-alive barrier if needed
@@ -385,7 +386,8 @@ void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm,
noreg /* obj */,
dst /* pre_val */,
xthread /* thread */,
tmp1 /* tmp */,
tmp1 /* tmp1 */,
tmp2 /* tmp2 */,
true /* tosca_live */,
true /* expand_call */);
__ pop_call_clobbered_registers();

@@ -44,7 +44,8 @@ private:
Register obj,
Register pre_val,
Register thread,
Register tmp,
Register tmp1,
Register tmp2,
bool tosca_live,
bool expand_call);
void shenandoah_write_barrier_pre(MacroAssembler* masm,
@@ -76,7 +77,7 @@ public:
Register src, Register dst, Register count, RegSet saved_regs);

virtual void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Register dst, Address src, Register tmp1, Register tmp_thread);
Register dst, Address src, Register tmp1, Register tmp2);
virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Address dst, Register val, Register tmp1, Register tmp2, Register tmp3);

@@ -280,7 +280,7 @@ void InterpreterMacroAssembler::load_resolved_reference_at_index(
// Load pointer for resolved_references[] objArray
ld(result, Address(result, ConstantPool::cache_offset_in_bytes()));
ld(result, Address(result, ConstantPoolCache::resolved_references_offset_in_bytes()));
resolve_oop_handle(result, tmp);
resolve_oop_handle(result, tmp, t1);
// Add in the index
addi(index, index, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
shadd(result, index, result, index, LogBytesPerHeapOop);

@@ -511,7 +511,7 @@ void MacroAssembler::debug64(char* msg, int64_t pc, int64_t regs[])
fatal("DEBUG MESSAGE: %s", msg);
}

void MacroAssembler::resolve_jobject(Register value, Register thread, Register tmp) {
void MacroAssembler::resolve_jobject(Register value, Register tmp1, Register tmp2) {
Label done, not_weak;
beqz(value, done); // Use NULL as-is.

@@ -521,13 +521,13 @@ void MacroAssembler::resolve_jobject(Register value, Register thread, Register t

// Resolve jweak.
access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF, value,
Address(value, -JNIHandles::weak_tag_value), tmp, thread);
Address(value, -JNIHandles::weak_tag_value), tmp1, tmp2);
verify_oop(value);
j(done);

bind(not_weak);
// Resolve (untagged) jobject.
access_load_at(T_OBJECT, IN_NATIVE, value, Address(value, 0), tmp, thread);
access_load_at(T_OBJECT, IN_NATIVE, value, Address(value, 0), tmp1, tmp2);
verify_oop(value);
bind(done);
}
@@ -1708,24 +1708,24 @@ SkipIfEqual::~SkipIfEqual() {
_masm = NULL;
}

void MacroAssembler::load_mirror(Register dst, Register method, Register tmp) {
void MacroAssembler::load_mirror(Register dst, Register method, Register tmp1, Register tmp2) {
const int mirror_offset = in_bytes(Klass::java_mirror_offset());
ld(dst, Address(xmethod, Method::const_offset()));
ld(dst, Address(dst, ConstMethod::constants_offset()));
ld(dst, Address(dst, ConstantPool::pool_holder_offset_in_bytes()));
ld(dst, Address(dst, mirror_offset));
resolve_oop_handle(dst, tmp);
resolve_oop_handle(dst, tmp1, tmp2);
}

void MacroAssembler::resolve_oop_handle(Register result, Register tmp) {
void MacroAssembler::resolve_oop_handle(Register result, Register tmp1, Register tmp2) {
// OopHandle::resolve is an indirection.
assert_different_registers(result, tmp);
access_load_at(T_OBJECT, IN_NATIVE, result, Address(result, 0), tmp, noreg);
assert_different_registers(result, tmp1, tmp2);
access_load_at(T_OBJECT, IN_NATIVE, result, Address(result, 0), tmp1, tmp2);
}

// ((WeakHandle)result).resolve()
void MacroAssembler::resolve_weak_handle(Register result, Register tmp) {
assert_different_registers(result, tmp);
void MacroAssembler::resolve_weak_handle(Register result, Register tmp1, Register tmp2) {
assert_different_registers(result, tmp1, tmp2);
Label resolved;

// A null weak handle resolves to null.
@@ -1735,20 +1735,20 @@ void MacroAssembler::resolve_weak_handle(Register result, Register tmp) {
// Only IN_HEAP loads require a thread_tmp register
// WeakHandle::resolve is an indirection like jweak.
access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
result, Address(result), tmp, noreg /* tmp_thread */);
result, Address(result), tmp1, tmp2);
bind(resolved);
}

void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators,
Register dst, Address src,
Register tmp1, Register thread_tmp) {
Register tmp1, Register tmp2) {
BarrierSetAssembler *bs = BarrierSet::barrier_set()->barrier_set_assembler();
decorators = AccessInternal::decorator_fixup(decorators);
bool as_raw = (decorators & AS_RAW) != 0;
if (as_raw) {
bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, tmp2);
} else {
bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
bs->load_at(this, decorators, type, dst, src, tmp1, tmp2);
}
}

@@ -1946,13 +1946,13 @@ void MacroAssembler::store_heap_oop(Address dst, Register src, Register tmp1,
}

void MacroAssembler::load_heap_oop(Register dst, Address src, Register tmp1,
Register thread_tmp, DecoratorSet decorators) {
access_load_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, thread_tmp);
Register tmp2, DecoratorSet decorators) {
access_load_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, tmp2);
}

void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1,
Register thread_tmp, DecoratorSet decorators) {
access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL, dst, src, tmp1, thread_tmp);
Register tmp2, DecoratorSet decorators) {
access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL, dst, src, tmp1, tmp2);
}

// Used for storing NULLs.

@@ -171,9 +171,9 @@ class MacroAssembler: public Assembler {
virtual void check_and_handle_earlyret(Register java_thread);
virtual void check_and_handle_popframe(Register java_thread);

void resolve_weak_handle(Register result, Register tmp);
void resolve_oop_handle(Register result, Register tmp = x15);
void resolve_jobject(Register value, Register thread, Register tmp);
void resolve_weak_handle(Register result, Register tmp1, Register tmp2);
void resolve_oop_handle(Register result, Register tmp1, Register tmp2);
void resolve_jobject(Register value, Register tmp1, Register tmp2);

void movoop(Register dst, jobject obj);
void mov_metadata(Register dst, Metadata* obj);
@@ -181,9 +181,9 @@ class MacroAssembler: public Assembler {
void set_narrow_oop(Register dst, jobject obj);
void set_narrow_klass(Register dst, Klass* k);

void load_mirror(Register dst, Register method, Register tmp = x15);
void load_mirror(Register dst, Register method, Register tmp1, Register tmp2);
void access_load_at(BasicType type, DecoratorSet decorators, Register dst,
Address src, Register tmp1, Register thread_tmp);
Address src, Register tmp1, Register tmp2);
void access_store_at(BasicType type, DecoratorSet decorators, Address dst,
Register src, Register tmp1, Register tmp2, Register tmp3);
void load_klass(Register dst, Register src);
@@ -201,9 +201,9 @@ class MacroAssembler: public Assembler {
void encode_heap_oop(Register d, Register s);
void encode_heap_oop(Register r) { encode_heap_oop(r, r); };
void load_heap_oop(Register dst, Address src, Register tmp1 = noreg,
Register thread_tmp = noreg, DecoratorSet decorators = 0);
Register tmp2 = noreg, DecoratorSet decorators = 0);
void load_heap_oop_not_null(Register dst, Address src, Register tmp1 = noreg,
Register thread_tmp = noreg, DecoratorSet decorators = 0);
Register tmp2 = noreg, DecoratorSet decorators = 0);
void store_heap_oop(Address dst, Register src, Register tmp1 = noreg,
Register tmp2 = noreg, Register tmp3 = noreg, DecoratorSet decorators = 0);

@@ -1677,7 +1677,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,

// Unbox oop result, e.g. JNIHandles::resolve result.
if (is_reference_type(ret_type)) {
__ resolve_jobject(x10, xthread, t1);
__ resolve_jobject(x10, x11, x12);
}

if (CheckJNICalls) {

@@ -3804,15 +3804,16 @@ class StubGenerator: public StubCodeGenerator {
__ mv(c_rarg0, thread);
}

static void jfr_epilogue(MacroAssembler* _masm, Register thread) {
static void jfr_epilogue(MacroAssembler* _masm) {
__ reset_last_Java_frame(true);
Label null_jobject;
__ beqz(x10, null_jobject);
DecoratorSet decorators = ACCESS_READ | IN_NATIVE;
BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
bs->load_at(_masm, decorators, T_OBJECT, x10, Address(x10, 0), c_rarg0, thread);
bs->load_at(_masm, decorators, T_OBJECT, x10, Address(x10, 0), t0, t1);
__ bind(null_jobject);
}

// For c2: c_rarg0 is junk, call to runtime to write a checkpoint.
// It returns a jobject handle to the event writer.
// The handle is dereferenced and the return value is the event writer oop.
@@ -3838,7 +3839,7 @@ class StubGenerator: public StubCodeGenerator {
address the_pc = __ pc();
jfr_prologue(the_pc, _masm, xthread);
__ call_VM_leaf(CAST_FROM_FN_PTR(address, JfrIntrinsicSupport::write_checkpoint), 1);
jfr_epilogue(_masm, xthread);
jfr_epilogue(_masm);
__ leave();
__ ret();

@@ -686,7 +686,7 @@ void TemplateInterpreterGenerator::lock_method() {
// get receiver (assume this is frequent case)
__ ld(x10, Address(xlocals, Interpreter::local_offset_in_bytes(0)));
__ beqz(t0, done);
__ load_mirror(x10, xmethod);
__ load_mirror(x10, xmethod, x15, t1);

#ifdef ASSERT
{
@@ -766,7 +766,7 @@ void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) {
__ sd(zr, Address(sp, 8 * wordSize));

// Get mirror
__ load_mirror(t2, xmethod);
__ load_mirror(t2, xmethod, x15, t1);
if (!native_call) {
__ ld(t0, Address(xmethod, Method::const_offset()));
__ lhu(t0, Address(t0, ConstMethod::max_stack_offset()));
@@ -839,7 +839,7 @@ address TemplateInterpreterGenerator::generate_Reference_get_entry(void) {
// Load the value of the referent field.
const Address field_address(local_0, referent_offset);
BarrierSetAssembler *bs = BarrierSet::barrier_set()->barrier_set_assembler();
bs->load_at(_masm, IN_HEAP | ON_WEAK_OOP_REF, T_OBJECT, local_0, field_address, /*tmp1*/ t1, /*tmp2*/ t0);
bs->load_at(_masm, IN_HEAP | ON_WEAK_OOP_REF, T_OBJECT, local_0, field_address, /*tmp1*/ t0, /*tmp2*/ t1);

// areturn
__ andi(sp, x19_sender_sp, -16); // done with stack
@@ -1047,7 +1047,7 @@ address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
assert(InterpreterRuntime::SignatureHandlerGenerator::to() == sp,
"adjust this code");
assert(InterpreterRuntime::SignatureHandlerGenerator::temp() == t0,
"adjust this code");
"adjust this code");

// The generated handlers do not touch xmethod (the method).
// However, large signatures cannot be cached and are generated
@@ -1067,7 +1067,7 @@ address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
__ andi(t0, t, JVM_ACC_STATIC);
__ beqz(t0, L);
// get mirror
__ load_mirror(t, xmethod);
__ load_mirror(t, xmethod, x28, t1);
// copy mirror into activation frame
__ sd(t, Address(fp, frame::interpreter_frame_oop_temp_offset * wordSize));
// pass handle to mirror
@@ -1203,7 +1203,7 @@ address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
__ bne(t, result_handler, no_oop);
// Unbox oop result, e.g. JNIHandles::resolve result.
__ pop(ltos);
__ resolve_jobject(x10, xthread, t);
__ resolve_jobject(x10, t, t1);
__ sd(x10, Address(fp, frame::interpreter_frame_oop_temp_offset * wordSize));
// keep stack depth as expected by pushing oop which will eventually be discarded
__ push(ltos);

@@ -103,23 +103,23 @@ static inline Address at_tos () {
}

static inline Address at_tos_p1() {
return Address(esp, Interpreter::expr_offset_in_bytes(1));
return Address(esp, Interpreter::expr_offset_in_bytes(1));
}

static inline Address at_tos_p2() {
return Address(esp, Interpreter::expr_offset_in_bytes(2));
return Address(esp, Interpreter::expr_offset_in_bytes(2));
}

static inline Address at_tos_p3() {
return Address(esp, Interpreter::expr_offset_in_bytes(3));
return Address(esp, Interpreter::expr_offset_in_bytes(3));
}

static inline Address at_tos_p4() {
return Address(esp, Interpreter::expr_offset_in_bytes(4));
return Address(esp, Interpreter::expr_offset_in_bytes(4));
}

static inline Address at_tos_p5() {
return Address(esp, Interpreter::expr_offset_in_bytes(5));
return Address(esp, Interpreter::expr_offset_in_bytes(5));
}

// Miscellaneous helper routines
@@ -130,14 +130,14 @@ static void do_oop_store(InterpreterMacroAssembler* _masm,
Register val,
DecoratorSet decorators) {
assert(val == noreg || val == x10, "parameter is just for looks");
__ store_heap_oop(dst, val, x29, x11, x13, decorators);
__ store_heap_oop(dst, val, x28, x29, x13, decorators);
}

static void do_oop_load(InterpreterMacroAssembler* _masm,
Address src,
Register dst,
DecoratorSet decorators) {
__ load_heap_oop(dst, src, x7, x11, decorators);
__ load_heap_oop(dst, src, x28, x29, decorators);
}

Address TemplateTable::at_bcp(int offset) {
@@ -146,10 +146,9 @@ Address TemplateTable::at_bcp(int offset) {
}

void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
int byte_no)
{
if (!RewriteBytecodes) { return; }
Register temp_reg, bool load_bc_into_bc_reg /*=true*/,
int byte_no) {
if (!RewriteBytecodes) { return; }
Label L_patch_done;

switch (bc) {
@@ -221,26 +220,22 @@ void TemplateTable::shouldnotreachhere() {
__ stop("should not reach here bytecode");
}

void TemplateTable::aconst_null()
{
void TemplateTable::aconst_null() {
transition(vtos, atos);
__ mv(x10, zr);
}

void TemplateTable::iconst(int value)
{
void TemplateTable::iconst(int value) {
transition(vtos, itos);
__ mv(x10, value);
}

void TemplateTable::lconst(int value)
{
void TemplateTable::lconst(int value) {
transition(vtos, ltos);
__ mv(x10, value);
}

void TemplateTable::fconst(int value)
{
void TemplateTable::fconst(int value) {
transition(vtos, ftos);
static float fBuf[2] = {1.0, 2.0};
__ mv(t0, (intptr_t)fBuf);
@@ -259,8 +254,7 @@ void TemplateTable::fconst(int value)
}
}

void TemplateTable::dconst(int value)
{
void TemplateTable::dconst(int value) {
transition(vtos, dtos);
static double dBuf[2] = {1.0, 2.0};
__ mv(t0, (intptr_t)dBuf);
@@ -279,22 +273,19 @@ void TemplateTable::dconst(int value)
}
}

void TemplateTable::bipush()
{
void TemplateTable::bipush() {
transition(vtos, itos);
__ load_signed_byte(x10, at_bcp(1));
}

void TemplateTable::sipush()
{
void TemplateTable::sipush() {
transition(vtos, itos);
__ load_unsigned_short(x10, at_bcp(1));
__ revb_w_w(x10, x10);
__ sraiw(x10, x10, 16);
}

void TemplateTable::ldc(bool wide)
{
void TemplateTable::ldc(bool wide) {
transition(vtos, vtos);
Label call_ldc, notFloat, notClass, notInt, Done;

@@ -363,8 +354,7 @@ void TemplateTable::ldc(bool wide)
}

// Fast path for caching oop constants.
void TemplateTable::fast_aldc(bool wide)
{
void TemplateTable::fast_aldc(bool wide) {
transition(vtos, atos);

const Register result = x10;
@@ -399,7 +389,7 @@ void TemplateTable::fast_aldc(bool wide)
int32_t offset = 0;
__ movptr_with_offset(rarg, Universe::the_null_sentinel_addr(), offset);
__ ld(tmp, Address(rarg, offset));
__ resolve_oop_handle(tmp);
__ resolve_oop_handle(tmp, x15, t1);
__ bne(result, tmp, notNull);
__ mv(result, zr); // NULL object reference
__ bind(notNull);
@@ -411,8 +401,7 @@ void TemplateTable::fast_aldc(bool wide)
}
}

void TemplateTable::ldc2_w()
{
void TemplateTable::ldc2_w() {
transition(vtos, vtos);
Label notDouble, notLong, Done;
__ get_unsigned_2_byte_index_at_bcp(x10, 1);
@@ -448,8 +437,7 @@ void TemplateTable::ldc2_w()
__ bind(Done);
}

void TemplateTable::condy_helper(Label& Done)
{
void TemplateTable::condy_helper(Label& Done) {
const Register obj = x10;
const Register rarg = x11;
const Register flags = x12;
@@ -558,8 +546,7 @@ void TemplateTable::condy_helper(Label& Done)
__ stop("bad ldc/condy");
}

void TemplateTable::locals_index(Register reg, int offset)
{
void TemplateTable::locals_index(Register reg, int offset) {
__ lbu(reg, at_bcp(offset));
__ neg(reg, reg);
}
@@ -614,8 +601,7 @@ void TemplateTable::iload_internal(RewriteControl rc) {
__ lw(x10, iaddress(x11, x10, _masm));
}

void TemplateTable::fast_iload2()
{
void TemplateTable::fast_iload2() {
transition(vtos, itos);
locals_index(x11);
__ lw(x10, iaddress(x11, x10, _masm));
@@ -624,15 +610,13 @@ void TemplateTable::fast_iload2()
__ lw(x10, iaddress(x11, x10, _masm));
}

void TemplateTable::fast_iload()
{
void TemplateTable::fast_iload() {
transition(vtos, itos);
locals_index(x11);
__ lw(x10, iaddress(x11, x10, _masm));
}

void TemplateTable::lload()
{
void TemplateTable::lload() {
transition(vtos, ltos);
__ lbu(x11, at_bcp(1));
__ slli(x11, x11, LogBytesPerWord);
@@ -640,15 +624,13 @@ void TemplateTable::lload()
__ ld(x10, Address(x11, Interpreter::local_offset_in_bytes(1)));
}

void TemplateTable::fload()
{
void TemplateTable::fload() {
transition(vtos, ftos);
locals_index(x11);
__ flw(f10, faddress(x11, t0, _masm));
}

void TemplateTable::dload()
{
void TemplateTable::dload() {
transition(vtos, dtos);
__ lbu(x11, at_bcp(1));
__ slli(x11, x11, LogBytesPerWord);
@@ -656,12 +638,10 @@ void TemplateTable::dload()
__ fld(f10, Address(x11, Interpreter::local_offset_in_bytes(1)));
}

void TemplateTable::aload()
{
void TemplateTable::aload() {
transition(vtos, atos);
locals_index(x11);
__ ld(x10, iaddress(x11, x10, _masm));

}

void TemplateTable::locals_index_wide(Register reg) {
@@ -676,8 +656,7 @@ void TemplateTable::wide_iload() {
__ lw(x10, iaddress(x11, t0, _masm));
}

void TemplateTable::wide_lload()
{
void TemplateTable::wide_lload() {
transition(vtos, ltos);
__ lhu(x11, at_bcp(2));
__ revb_h_h_u(x11, x11); // reverse bytes in half-word and zero-extend
@@ -686,15 +665,13 @@ void TemplateTable::wide_lload()
__ ld(x10, Address(x11, Interpreter::local_offset_in_bytes(1)));
}

void TemplateTable::wide_fload()
{
void TemplateTable::wide_fload() {
transition(vtos, ftos);
locals_index_wide(x11);
__ flw(f10, faddress(x11, t0, _masm));
}

void TemplateTable::wide_dload()
{
void TemplateTable::wide_dload() {
transition(vtos, dtos);
__ lhu(x11, at_bcp(2));
__ revb_h_h_u(x11, x11); // reverse bytes in half-word and zero-extend
@@ -703,15 +680,13 @@ void TemplateTable::wide_dload()
__ fld(f10, Address(x11, Interpreter::local_offset_in_bytes(1)));
}

void TemplateTable::wide_aload()
{
void TemplateTable::wide_aload() {
transition(vtos, atos);
locals_index_wide(x11);
__ ld(x10, aaddress(x11, t0, _masm));
}

void TemplateTable::index_check(Register array, Register index)
{
void TemplateTable::index_check(Register array, Register index) {
// destroys x11, t0
// check array
__ null_check(array, arrayOopDesc::length_offset_in_bytes());
@@ -732,8 +707,7 @@ void TemplateTable::index_check(Register array, Register index)
__ bind(ok);
}

void TemplateTable::iaload()
{
void TemplateTable::iaload() {
transition(itos, itos);
__ mv(x11, x10);
__ pop_ptr(x10);
@@ -746,8 +720,7 @@ void TemplateTable::iaload()
__ addw(x10, x10, zr); // signed extended
}

void TemplateTable::laload()
{
void TemplateTable::laload() {
transition(itos, ltos);
__ mv(x11, x10);
__ pop_ptr(x10);
@@ -759,8 +732,7 @@ void TemplateTable::laload()
__ access_load_at(T_LONG, IN_HEAP | IS_ARRAY, x10, Address(x10), noreg, noreg);
}

void TemplateTable::faload()
{
void TemplateTable::faload() {
transition(itos, ftos);
__ mv(x11, x10);
__ pop_ptr(x10);
@@ -772,8 +744,7 @@ void TemplateTable::faload()
__ access_load_at(T_FLOAT, IN_HEAP | IS_ARRAY, x10, Address(x10), noreg, noreg);
}

void TemplateTable::daload()
{
void TemplateTable::daload() {
transition(itos, dtos);
__ mv(x11, x10);
__ pop_ptr(x10);
@@ -785,8 +756,7 @@ void TemplateTable::daload()
__ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, x10, Address(x10), noreg, noreg);
}

void TemplateTable::aaload()
{
void TemplateTable::aaload() {
transition(itos, atos);
__ mv(x11, x10);
__ pop_ptr(x10);
@@ -795,14 +765,10 @@ void TemplateTable::aaload()
index_check(x10, x11); // leaves index in x11
__ add(x11, x11, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
__ shadd(x10, x11, x10, t0, LogBytesPerHeapOop);
do_oop_load(_masm,
Address(x10),
x10,
IS_ARRAY);
do_oop_load(_masm, Address(x10), x10, IS_ARRAY);
}

void TemplateTable::baload()
{
void TemplateTable::baload() {
transition(itos, itos);
__ mv(x11, x10);
__ pop_ptr(x10);
@@ -814,9 +780,8 @@ void TemplateTable::baload()
__ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, x10, Address(x10), noreg, noreg);
}

void TemplateTable::caload()
{
transition(itos, itos);
void TemplateTable::caload() {
transition(itos, itos);
__ mv(x11, x10);
__ pop_ptr(x10);
// x10: array
@@ -828,8 +793,7 @@ void TemplateTable::caload()
}

// iload followed by caload frequent pair
void TemplateTable::fast_icaload()
{
void TemplateTable::fast_icaload() {
transition(vtos, itos);
// load index out of locals
locals_index(x12);
@@ -844,8 +808,7 @@ void TemplateTable::fast_icaload()
__ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, x10, Address(x10), noreg, noreg);
}

void TemplateTable::saload()
{
void TemplateTable::saload() {
transition(itos, itos);
__ mv(x11, x10);
__ pop_ptr(x10);
@@ -857,32 +820,27 @@ void TemplateTable::saload()
__ access_load_at(T_SHORT, IN_HEAP | IS_ARRAY, x10, Address(x10), noreg, noreg);
}

void TemplateTable::iload(int n)
{
void TemplateTable::iload(int n) {
transition(vtos, itos);
__ lw(x10, iaddress(n));
}

void TemplateTable::lload(int n)
{
void TemplateTable::lload(int n) {
transition(vtos, ltos);
__ ld(x10, laddress(n));
}

void TemplateTable::fload(int n)
{
void TemplateTable::fload(int n) {
transition(vtos, ftos);
__ flw(f10, faddress(n));
}

void TemplateTable::dload(int n)
{
void TemplateTable::dload(int n) {
transition(vtos, dtos);
__ fld(f10, daddress(n));
}

void TemplateTable::aload(int n)
{
void TemplateTable::aload(int n) {
transition(vtos, atos);
__ ld(x10, iaddress(n));
}
@@ -962,15 +920,13 @@ void TemplateTable::aload_0_internal(RewriteControl rc) {
aload(0);
}

void TemplateTable::istore()
{
void TemplateTable::istore() {
transition(itos, vtos);
locals_index(x11);
__ sw(x10, iaddress(x11, t0, _masm));
}

void TemplateTable::lstore()
{
void TemplateTable::lstore() {
transition(ltos, vtos);
locals_index(x11);
__ sd(x10, laddress(x11, t0, _masm));
@@ -988,8 +944,7 @@ void TemplateTable::dstore() {
__ fsd(f10, daddress(x11, t0, _masm));
}

void TemplateTable::astore()
{
void TemplateTable::astore() {
transition(vtos, vtos);
__ pop_ptr(x10);
locals_index(x11);
@@ -1110,7 +1065,7 @@ void TemplateTable::aastore() {

// Generate subtype check. Blows x12, x15
// Superklass in x10. Subklass in x11.
__ gen_subtype_check(x11, ok_is_subtype); //todo
__ gen_subtype_check(x11, ok_is_subtype);

// Come here on failure
// object is at TOS
@@ -1135,11 +1090,9 @@ void TemplateTable::aastore() {
// Pop stack arguments
__ bind(done);
__ add(esp, esp, 3 * Interpreter::stackElementSize);

}

void TemplateTable::bastore()
{
void TemplateTable::bastore() {
transition(itos, vtos);
__ pop_i(x11);
__ pop_ptr(x13);
@@ -1164,8 +1117,7 @@ void TemplateTable::bastore()
__ access_store_at(T_BYTE, IN_HEAP | IS_ARRAY, Address(x11, 0), x10, noreg, noreg, noreg);
}

void TemplateTable::castore()
{
void TemplateTable::castore() {
transition(itos, vtos);
__ pop_i(x11);
__ pop_ptr(x13);
@@ -1178,64 +1130,54 @@ void TemplateTable::castore()
__ access_store_at(T_CHAR, IN_HEAP | IS_ARRAY, Address(t0, 0), x10, noreg, noreg, noreg);
}

void TemplateTable::sastore()
{
void TemplateTable::sastore() {
castore();
}

void TemplateTable::istore(int n)
{
void TemplateTable::istore(int n) {
transition(itos, vtos);
__ sd(x10, iaddress(n));
}

void TemplateTable::lstore(int n)
{
void TemplateTable::lstore(int n) {
transition(ltos, vtos);
__ sd(x10, laddress(n));
}

void TemplateTable::fstore(int n)
{
void TemplateTable::fstore(int n) {
transition(ftos, vtos);
__ fsw(f10, faddress(n));
}

void TemplateTable::dstore(int n)
{
void TemplateTable::dstore(int n) {
transition(dtos, vtos);
__ fsd(f10, daddress(n));
}

void TemplateTable::astore(int n)
{
void TemplateTable::astore(int n) {
transition(vtos, vtos);
__ pop_ptr(x10);
__ sd(x10, iaddress(n));
}

void TemplateTable::pop()
{
void TemplateTable::pop() {
transition(vtos, vtos);
__ addi(esp, esp, Interpreter::stackElementSize);
}

void TemplateTable::pop2()
{
void TemplateTable::pop2() {
transition(vtos, vtos);
__ addi(esp, esp, 2 * Interpreter::stackElementSize);
}

void TemplateTable::dup()
{
void TemplateTable::dup() {
transition(vtos, vtos);
__ ld(x10, Address(esp, 0));
__ push_reg(x10);
// stack: ..., a, a
}

void TemplateTable::dup_x1()
{
void TemplateTable::dup_x1() {
transition(vtos, vtos);
// stack: ..., a, b
__ ld(x10, at_tos()); // load b
@@ -1246,8 +1188,7 @@ void TemplateTable::dup_x1()
// stack: ..., b, a, b
}

void TemplateTable::dup_x2()
{
void TemplateTable::dup_x2() {
transition(vtos, vtos);
// stack: ..., a, b, c
__ ld(x10, at_tos()); // load c
@@ -1262,8 +1203,7 @@ void TemplateTable::dup_x2()
// stack: ..., c, a, b, c
}

void TemplateTable::dup2()
{
void TemplateTable::dup2() {
transition(vtos, vtos);
// stack: ..., a, b
__ ld(x10, at_tos_p1()); // load a
@@ -1273,8 +1213,7 @@ void TemplateTable::dup2()
// stack: ..., a, b, a, b
}

void TemplateTable::dup2_x1()
{
void TemplateTable::dup2_x1() {
transition(vtos, vtos);
// stack: ..., a, b, c
__ ld(x12, at_tos()); // load c
@@ -1291,8 +1230,7 @@ void TemplateTable::dup2_x1()
// stack: ..., b, c, a, b, c
}

void TemplateTable::dup2_x2()
{
void TemplateTable::dup2_x2() {
transition(vtos, vtos);
// stack: ..., a, b, c, d
__ ld(x12, at_tos()); // load d
@@ -1311,8 +1249,7 @@ void TemplateTable::dup2_x2()
// stack: ..., c, d, a, b, c, d
}

void TemplateTable::swap()
{
void TemplateTable::swap() {
transition(vtos, vtos);
// stack: ..., a, b
__ ld(x12, at_tos_p1()); // load a
@@ -1322,8 +1259,7 @@ void TemplateTable::swap()
// stack: ..., b, a
}

void TemplateTable::iop2(Operation op)
{
void TemplateTable::iop2(Operation op) {
transition(itos, itos);
// x10 <== x11 op x10
__ pop_i(x11);
@@ -1341,8 +1277,7 @@ void TemplateTable::iop2(Operation op)
}
}

void TemplateTable::lop2(Operation op)
{
void TemplateTable::lop2(Operation op) {
transition(ltos, ltos);
// x10 <== x11 op x10
__ pop_l(x11);
@@ -1357,8 +1292,7 @@ void TemplateTable::lop2(Operation op)
}
}

void TemplateTable::idiv()
{
void TemplateTable::idiv() {
transition(itos, itos);
// explicitly check for div0
Label no_div0;
@@ -1371,8 +1305,7 @@ void TemplateTable::idiv()
__ corrected_idivl(x10, x11, x10, /* want_remainder */ false);
}

void TemplateTable::irem()
{
void TemplateTable::irem() {
transition(itos, itos);
// explicitly check for div0
Label no_div0;
@@ -1385,15 +1318,13 @@ void TemplateTable::irem()
__ corrected_idivl(x10, x11, x10, /* want_remainder */ true);
}

void TemplateTable::lmul()
{
void TemplateTable::lmul() {
transition(ltos, ltos);
__ pop_l(x11);
__ mul(x10, x10, x11);
}

void TemplateTable::ldiv()
{
void TemplateTable::ldiv() {
transition(ltos, ltos);
// explicitly check for div0
Label no_div0;
@@ -1406,8 +1337,7 @@ void TemplateTable::ldiv()
__ corrected_idivq(x10, x11, x10, /* want_remainder */ false);
}

void TemplateTable::lrem()
{
void TemplateTable::lrem() {
transition(ltos, ltos);
// explicitly check for div0
Label no_div0;
@@ -1420,32 +1350,28 @@ void TemplateTable::lrem()
__ corrected_idivq(x10, x11, x10, /* want_remainder */ true);
}

void TemplateTable::lshl()
{
void TemplateTable::lshl() {
transition(itos, ltos);
// shift count is in x10
__ pop_l(x11);
__ sll(x10, x11, x10);
}

void TemplateTable::lshr()
{
void TemplateTable::lshr() {
transition(itos, ltos);
// shift count is in x10
__ pop_l(x11);
__ sra(x10, x11, x10);
}

void TemplateTable::lushr()
{
void TemplateTable::lushr() {
transition(itos, ltos);
// shift count is in x10
__ pop_l(x11);
__ srl(x10, x11, x10);
}

void TemplateTable::fop2(Operation op)
{
void TemplateTable::fop2(Operation op) {
transition(ftos, ftos);
switch (op) {
case add:
@@ -1474,8 +1400,7 @@ void TemplateTable::fop2(Operation op)
}
}

void TemplateTable::dop2(Operation op)
{
void TemplateTable::dop2(Operation op) {
transition(dtos, dtos);
switch (op) {
case add:
@@ -1504,32 +1429,27 @@ void TemplateTable::dop2(Operation op)
}
}

void TemplateTable::ineg()
{
void TemplateTable::ineg() {
transition(itos, itos);
__ negw(x10, x10);
}

void TemplateTable::lneg()
{
void TemplateTable::lneg() {
transition(ltos, ltos);
__ neg(x10, x10);
}

void TemplateTable::fneg()
{
void TemplateTable::fneg() {
transition(ftos, ftos);
__ fneg_s(f10, f10);
}

void TemplateTable::dneg()
{
void TemplateTable::dneg() {
transition(dtos, dtos);
__ fneg_d(f10, f10);
}

void TemplateTable::iinc()
{
void TemplateTable::iinc() {
transition(vtos, vtos);
__ load_signed_byte(x11, at_bcp(2)); // get constant
locals_index(x12);
@@ -1538,8 +1458,7 @@ void TemplateTable::iinc()
__ sd(x10, iaddress(x12, t0, _masm));
}

void TemplateTable::wide_iinc()
{
void TemplateTable::wide_iinc() {
transition(vtos, vtos);
__ lwu(x11, at_bcp(2)); // get constant and index
__ revb_h_w_u(x11, x11); // reverse bytes in half-word (32bit) and zero-extend
@@ -1552,8 +1471,7 @@ void TemplateTable::wide_iinc()
__ sd(x10, iaddress(x12, t0, _masm));
}

void TemplateTable::convert()
{
void TemplateTable::convert() {
// Checking
#ifdef ASSERT
{
@@ -1651,16 +1569,14 @@ void TemplateTable::convert()
}
}

void TemplateTable::lcmp()
{
void TemplateTable::lcmp() {
transition(ltos, itos);
__ pop_l(x11);
__ cmp_l2i(t0, x11, x10);
__ mv(x10, t0);
}

void TemplateTable::float_cmp(bool is_float, int unordered_result)
{
void TemplateTable::float_cmp(bool is_float, int unordered_result) {
// For instruction feq, flt and fle, the result is 0 if either operand is NaN
if (is_float) {
__ pop_f(f11);
@@ -1685,8 +1601,7 @@ void TemplateTable::float_cmp(bool is_float, int unordered_result)
}
}

void TemplateTable::branch(bool is_jsr, bool is_wide)
{
void TemplateTable::branch(bool is_jsr, bool is_wide) {
// We might be moving to a safepoint. The thread which calls
// Interpreter::notice_safepoints() will effectively flush its cache
// when it makes a system call, but we need to do something to
@@ -1841,8 +1756,7 @@ void TemplateTable::branch(bool is_jsr, bool is_wide)
}
}

void TemplateTable::if_0cmp(Condition cc)
{
void TemplateTable::if_0cmp(Condition cc) {
transition(itos, vtos);
// assume branch is more often taken than not (loops use backward branches)
Label not_taken;
@@ -1876,8 +1790,7 @@ void TemplateTable::if_0cmp(Condition cc)
__ profile_not_taken_branch(x10);
}

void TemplateTable::if_icmp(Condition cc)
{
void TemplateTable::if_icmp(Condition cc) {
transition(itos, vtos);
// assume branch is more often taken than not (loops use backward branches)
Label not_taken;
@@ -1911,8 +1824,7 @@ void TemplateTable::if_icmp(Condition cc)
__ profile_not_taken_branch(x10);
}

void TemplateTable::if_nullcmp(Condition cc)
{
void TemplateTable::if_nullcmp(Condition cc) {
transition(atos, vtos);
// assume branch is more often taken than not (loops use backward branches)
Label not_taken;
@@ -1926,8 +1838,7 @@ void TemplateTable::if_nullcmp(Condition cc)
__ profile_not_taken_branch(x10);
}

void TemplateTable::if_acmp(Condition cc)
{
void TemplateTable::if_acmp(Condition cc) {
transition(atos, vtos);
// assume branch is more often taken than not (loops use backward branches)
Label not_taken;
@@ -2165,8 +2076,7 @@ void TemplateTable::fast_binaryswitch() {
__ dispatch_only(vtos, /*generate_poll*/true);
}

void TemplateTable::_return(TosState state)
{
void TemplateTable::_return(TosState state) {
transition(state, state);
assert(_desc->calls_vm(),
"inconsistent calls_vm information"); // call in remove_activation
@@ -2301,7 +2211,7 @@ void TemplateTable::load_field_cp_cache_entry(Register obj,
ConstantPoolCacheEntry::f1_offset())));
const int mirror_offset = in_bytes(Klass::java_mirror_offset());
__ ld(obj, Address(obj, mirror_offset));
__ resolve_oop_handle(obj);
__ resolve_oop_handle(obj, x15, t1);
}
}

@@ -2379,15 +2289,13 @@ void TemplateTable::jvmti_post_field_access(Register cache, Register index,
}
}

void TemplateTable::pop_and_check_object(Register r)
{
void TemplateTable::pop_and_check_object(Register r) {
__ pop_ptr(r);
__ null_check(r); // for field access must check obj.
__ verify_oop(r);
}

void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
{
void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
const Register cache = x12;
const Register index = x13;
const Register obj = x14;
@@ -2546,8 +2454,7 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteContr
__ bind(notVolatile);
}

void TemplateTable::getfield(int byte_no)
{
void TemplateTable::getfield(int byte_no) {
getfield_or_static(byte_no, false);
}

@@ -2854,8 +2761,7 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteContr
}
}

void TemplateTable::putfield(int byte_no)
{
void TemplateTable::putfield(int byte_no) {
putfield_or_static(byte_no, false);
}

@@ -2867,8 +2773,7 @@ void TemplateTable::putstatic(int byte_no) {
putfield_or_static(byte_no, true);
}

void TemplateTable::jvmti_post_fast_field_mod()
{
void TemplateTable::jvmti_post_fast_field_mod() {
if (JvmtiExport::can_post_field_modification()) {
// Check to see if a field modification watch has been set before
// we take the time to call into the VM.
@@ -2925,8 +2830,7 @@ void TemplateTable::jvmti_post_fast_field_mod()
}
}

void TemplateTable::fast_storefield(TosState state)
{
void TemplateTable::fast_storefield(TosState state) {
transition(state, vtos);

ByteSize base = ConstantPoolCache::base_offset();
@@ -3003,8 +2907,7 @@ void TemplateTable::fast_storefield(TosState state)
}
}

void TemplateTable::fast_accessfield(TosState state)
{
void TemplateTable::fast_accessfield(TosState state) {
transition(atos, state);
// Do the JVMTI work here to avoid disturbing the register state below
if (JvmtiExport::can_post_field_access()) {
@@ -3087,8 +2990,7 @@ void TemplateTable::fast_accessfield(TosState state)
}
}

void TemplateTable::fast_xaccess(TosState state)
{
void TemplateTable::fast_xaccess(TosState state) {
transition(vtos, state);

// get receiver
@@ -3210,8 +3112,7 @@ void TemplateTable::prepare_invoke(int byte_no,

void TemplateTable::invokevirtual_helper(Register index,
Register recv,
Register flags)
{
Register flags) {
// Uses temporary registers x10, x13
assert_different_registers(index, recv, x10, x13);
// Test for an invoke of a final method
@@ -3249,8 +3150,7 @@ void TemplateTable::invokevirtual_helper(Register index,
__ jump_from_interpreted(method);
}

void TemplateTable::invokevirtual(int byte_no)
{
void TemplateTable::invokevirtual(int byte_no) {
transition(vtos, vtos);
assert(byte_no == f2_byte, "use this argument");

@@ -3263,8 +3163,7 @@ void TemplateTable::invokevirtual(int byte_no)
invokevirtual_helper(xmethod, x12, x13);
}

void TemplateTable::invokespecial(int byte_no)
{
void TemplateTable::invokespecial(int byte_no) {
transition(vtos, vtos);
assert(byte_no == f1_byte, "use this argument");

@@ -3278,8 +3177,7 @@ void TemplateTable::invokespecial(int byte_no)
__ jump_from_interpreted(xmethod);
}

void TemplateTable::invokestatic(int byte_no)
{
void TemplateTable::invokestatic(int byte_no) {
transition(vtos, vtos);
assert(byte_no == f1_byte, "use this argument");

@@ -3290,8 +3188,7 @@ void TemplateTable::invokestatic(int byte_no)
__ jump_from_interpreted(xmethod);
}

void TemplateTable::fast_invokevfinal(int byte_no)
{
void TemplateTable::fast_invokevfinal(int byte_no) {
__ call_Unimplemented();
}

@@ -3586,8 +3483,7 @@ void TemplateTable::arraylength() {
__ lwu(x10, Address(x10, arrayOopDesc::length_offset_in_bytes()));
}

void TemplateTable::checkcast()
{
void TemplateTable::checkcast() {
transition(atos, atos);
Label done, is_null, ok_is_subtype, quicked, resolved;
__ beqz(x10, is_null);
@@ -3702,6 +3598,7 @@ void TemplateTable::instanceof() {

//-----------------------------------------------------------------------------
// Breakpoints

void TemplateTable::_breakpoint() {
// Note: We get here even if we are single stepping..
// jbug inists on setting breakpoints at every bytecode
@@ -3753,8 +3650,8 @@ void TemplateTable::athrow() {
// [frame data ] <--- monitor block bot
// ...
// [saved fp ] <--- fp
void TemplateTable::monitorenter()
{

void TemplateTable::monitorenter() {
transition(atos, vtos);

// check for NULL object
@@ -3852,8 +3749,7 @@ void TemplateTable::monitorenter()
__ dispatch_next(vtos);
}

void TemplateTable::monitorexit()
{
void TemplateTable::monitorexit() {
transition(atos, vtos);

// check for NULL object
@@ -3891,7 +3787,7 @@ void TemplateTable::monitorexit()

// error handling. Unlocking was not block-structured
__ call_VM(noreg, CAST_FROM_FN_PTR(address,
InterpreterRuntime::throw_illegal_monitor_state_exception));
InterpreterRuntime::throw_illegal_monitor_state_exception));
__ should_not_reach_here();

// call run-time routine
@@ -3902,8 +3798,7 @@ void TemplateTable::monitorexit()
}

// Wide instructions
void TemplateTable::wide()
{
void TemplateTable::wide() {
__ load_unsigned_byte(x9, at_bcp(1));
__ mv(t0, (address)Interpreter::_wentry_point);
__ shadd(t0, x9, t0, t1, 3);