commit e6157688b4
Merge

@@ -63,6 +63,8 @@ public class InstanceKlass extends Klass {
  private static int CLASS_STATE_FULLY_INITIALIZED;
  private static int CLASS_STATE_INITIALIZATION_ERROR;

+ private static int IS_MARKED_DEPENDENT_MASK;
+
  private static synchronized void initialize(TypeDataBase db) throws WrongTypeException {
  Type type = db.lookupType("instanceKlass");
  arrayKlasses = new OopField(type.getOopField("_array_klasses"), Oop.getHeaderSize());

@@ -90,7 +92,7 @@ public class InstanceKlass extends Klass {
  staticFieldSize = new CIntField(type.getCIntegerField("_static_field_size"), Oop.getHeaderSize());
  staticOopFieldCount = new CIntField(type.getCIntegerField("_static_oop_field_count"), Oop.getHeaderSize());
  nonstaticOopMapSize = new CIntField(type.getCIntegerField("_nonstatic_oop_map_size"), Oop.getHeaderSize());
- isMarkedDependent = new CIntField(type.getCIntegerField("_is_marked_dependent"), Oop.getHeaderSize());
+ miscFlags = new CIntField(type.getCIntegerField("_misc_flags"), Oop.getHeaderSize());
  initState = new CIntField(type.getCIntegerField("_init_state"), Oop.getHeaderSize());
  vtableLen = new CIntField(type.getCIntegerField("_vtable_len"), Oop.getHeaderSize());
  itableLen = new CIntField(type.getCIntegerField("_itable_len"), Oop.getHeaderSize());

@@ -118,6 +120,8 @@ public class InstanceKlass extends Klass {
  CLASS_STATE_FULLY_INITIALIZED = db.lookupIntConstant("instanceKlass::fully_initialized").intValue();
  CLASS_STATE_INITIALIZATION_ERROR = db.lookupIntConstant("instanceKlass::initialization_error").intValue();

+ IS_MARKED_DEPENDENT_MASK = db.lookupIntConstant("instanceKlass::IS_MARKED_DEPENDENT").intValue();
+
  }

  InstanceKlass(OopHandle handle, ObjectHeap heap) {

@@ -151,7 +155,7 @@ public class InstanceKlass extends Klass {
  private static CIntField staticFieldSize;
  private static CIntField staticOopFieldCount;
  private static CIntField nonstaticOopMapSize;
- private static CIntField isMarkedDependent;
+ private static CIntField miscFlags;
  private static CIntField initState;
  private static CIntField vtableLen;
  private static CIntField itableLen;

@@ -333,7 +337,7 @@ public class InstanceKlass extends Klass {
  public long getNonstaticFieldSize() { return nonstaticFieldSize.getValue(this); }
  public long getStaticOopFieldCount() { return staticOopFieldCount.getValue(this); }
  public long getNonstaticOopMapSize() { return nonstaticOopMapSize.getValue(this); }
- public boolean getIsMarkedDependent() { return isMarkedDependent.getValue(this) != 0; }
+ public boolean getIsMarkedDependent() { return (miscFlags.getValue(this) & IS_MARKED_DEPENDENT_MASK) != 0; }
  public long getVtableLen() { return vtableLen.getValue(this); }
  public long getItableLen() { return itableLen.getValue(this); }
  public Symbol getGenericSignature() { return getSymbol(genericSignature); }

@@ -524,7 +528,7 @@ public class InstanceKlass extends Klass {
  visitor.doCInt(staticFieldSize, true);
  visitor.doCInt(staticOopFieldCount, true);
  visitor.doCInt(nonstaticOopMapSize, true);
- visitor.doCInt(isMarkedDependent, true);
+ visitor.doCInt(miscFlags, true);
  visitor.doCInt(initState, true);
  visitor.doCInt(vtableLen, true);
  visitor.doCInt(itableLen, true);
@@ -2455,7 +2455,7 @@ void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
  op->obj()->as_register() == O0 &&
  op->klass()->as_register() == G5, "must be");
  if (op->init_check()) {
- __ ld(op->klass()->as_register(),
+ __ ldub(op->klass()->as_register(),
  instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc),
  op->tmp1()->as_register());
  add_debug_info_for_null_check_here(op->stub()->info());

@@ -398,7 +398,7 @@ OopMapSet* Runtime1::generate_code_for(StubID id, StubAssembler* sasm) {

  if (id == fast_new_instance_init_check_id) {
  // make sure the klass is initialized
- __ ld(G5_klass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc), G3_t1);
+ __ ldub(G5_klass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc), G3_t1);
  __ cmp_and_br_short(G3_t1, instanceKlass::fully_initialized, Assembler::notEqual, Assembler::pn, slow_path);
  }
  #ifdef ASSERT

@@ -3414,6 +3414,9 @@ class StubGenerator: public StubCodeGenerator {
  generate_throw_exception("WrongMethodTypeException throw_exception",
  CAST_FROM_FN_PTR(address, SharedRuntime::throw_WrongMethodTypeException),
  G5_method_type, G3_method_handle);
+
+ // Build this early so it's available for the interpreter.
+ StubRoutines::_throw_StackOverflowError_entry = generate_throw_exception("StackOverflowError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
  }

@@ -3427,7 +3430,6 @@ class StubGenerator: public StubCodeGenerator {
  StubRoutines::_throw_AbstractMethodError_entry = generate_throw_exception("AbstractMethodError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
  StubRoutines::_throw_IncompatibleClassChangeError_entry= generate_throw_exception("IncompatibleClassChangeError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
  StubRoutines::_throw_NullPointerException_at_call_entry= generate_throw_exception("NullPointerException at call throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
- StubRoutines::_throw_StackOverflowError_entry = generate_throw_exception("StackOverflowError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));

  StubRoutines::_handler_for_unsafe_access_entry =
  generate_handler_for_unsafe_access();

@@ -396,7 +396,6 @@ void TemplateInterpreterGenerator::generate_stack_overflow_check(Register Rframe
  Register Rscratch,
  Register Rscratch2) {
  const int page_size = os::vm_page_size();
- Address saved_exception_pc(G2_thread, JavaThread::saved_exception_pc_offset());
  Label after_frame_check;

  assert_different_registers(Rframe_size, Rscratch, Rscratch2);

@@ -436,11 +435,19 @@ void TemplateInterpreterGenerator::generate_stack_overflow_check(Register Rframe
  // the bottom of the stack
  __ cmp_and_brx_short(SP, Rscratch, Assembler::greater, Assembler::pt, after_frame_check);

- // Save the return address as the exception pc
- __ st_ptr(O7, saved_exception_pc);

  // the stack will overflow, throw an exception
- __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_StackOverflowError));
+ // Note that SP is restored to sender's sp (in the delay slot). This
+ // is necessary if the sender's frame is an extended compiled frame
+ // (see gen_c2i_adapter()) and safer anyway in case of JSR292
+ // adaptations.

+ // Note also that the restored frame is not necessarily interpreted.
+ // Use the shared runtime version of the StackOverflowError.
+ assert(StubRoutines::throw_StackOverflowError_entry() != NULL, "stub not yet generated");
+ AddressLiteral stub(StubRoutines::throw_StackOverflowError_entry());
+ __ jump_to(stub, Rscratch);
+ __ delayed()->mov(O5_savedSP, SP);
+
  // if you get to here, then there is enough stack space
  __ bind( after_frame_check );

@@ -3350,7 +3350,7 @@ void TemplateTable::_new() {
  __ ld_ptr(Rscratch, Roffset, RinstanceKlass);

  // make sure klass is fully initialized:
- __ ld(RinstanceKlass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc), G3_scratch);
+ __ ldub(RinstanceKlass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc), G3_scratch);
  __ cmp(G3_scratch, instanceKlass::fully_initialized);
  __ br(Assembler::notEqual, false, Assembler::pn, slow_case);
  __ delayed()->ld(RinstanceKlass, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc), Roffset);
@@ -1557,7 +1557,7 @@ void LIR_Assembler::emit_opConvert(LIR_OpConvert* op) {

  void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
  if (op->init_check()) {
- __ cmpl(Address(op->klass()->as_register(),
+ __ cmpb(Address(op->klass()->as_register(),
  instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)),
  instanceKlass::fully_initialized);
  add_debug_info_for_null_check_here(op->stub()->info());

@@ -1011,7 +1011,7 @@ OopMapSet* Runtime1::generate_code_for(StubID id, StubAssembler* sasm) {

  if (id == fast_new_instance_init_check_id) {
  // make sure the klass is initialized
- __ cmpl(Address(klass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)), instanceKlass::fully_initialized);
+ __ cmpb(Address(klass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)), instanceKlass::fully_initialized);
  __ jcc(Assembler::notEqual, slow_path);
  }

@@ -2323,6 +2323,9 @@ class StubGenerator: public StubCodeGenerator {
  generate_throw_exception("WrongMethodTypeException throw_exception",
  CAST_FROM_FN_PTR(address, SharedRuntime::throw_WrongMethodTypeException),
  rax, rcx);
+
+ // Build this early so it's available for the interpreter
+ StubRoutines::_throw_StackOverflowError_entry = generate_throw_exception("StackOverflowError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
  }

@@ -2334,7 +2337,6 @@ class StubGenerator: public StubCodeGenerator {
  StubRoutines::_throw_AbstractMethodError_entry = generate_throw_exception("AbstractMethodError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
  StubRoutines::_throw_IncompatibleClassChangeError_entry= generate_throw_exception("IncompatibleClassChangeError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
  StubRoutines::_throw_NullPointerException_at_call_entry= generate_throw_exception("NullPointerException at call throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
- StubRoutines::_throw_StackOverflowError_entry = generate_throw_exception("StackOverflowError throw_exception", CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));

  //------------------------------------------------------------------------------------------------------------------------
  // entry points that are platform specific

@@ -3072,6 +3072,13 @@ class StubGenerator: public StubCodeGenerator {
  generate_throw_exception("WrongMethodTypeException throw_exception",
  CAST_FROM_FN_PTR(address, SharedRuntime::throw_WrongMethodTypeException),
  rax, rcx);
+
+ // Build this early so it's available for the interpreter.
+ StubRoutines::_throw_StackOverflowError_entry =
+ generate_throw_exception("StackOverflowError throw_exception",
+ CAST_FROM_FN_PTR(address,
+ SharedRuntime::
+ throw_StackOverflowError));
  }

  void generate_all() {

@@ -3098,12 +3105,6 @@ class StubGenerator: public StubCodeGenerator {
  SharedRuntime::
  throw_NullPointerException_at_call));

- StubRoutines::_throw_StackOverflowError_entry =
- generate_throw_exception("StackOverflowError throw_exception",
- CAST_FROM_FN_PTR(address,
- SharedRuntime::
- throw_StackOverflowError));
-
  // entry points that are platform specific
  StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
  StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();

@@ -522,9 +522,18 @@ void InterpreterGenerator::generate_stack_overflow_check(void) {

  __ pop(rsi);  // get saved bcp / (c++ prev state ).

- __ pop(rax);  // get return address
- __ jump(ExternalAddress(Interpreter::throw_StackOverflowError_entry()));
+ // Restore sender's sp as SP. This is necessary if the sender's
+ // frame is an extended compiled frame (see gen_c2i_adapter())
+ // and safer anyway in case of JSR292 adaptations.
+
+ __ pop(rax); // return address must be moved if SP is changed
+ __ mov(rsp, rsi);
+ __ push(rax);
+
+ // Note: the restored frame is not necessarily interpreted.
+ // Use the shared runtime version of the StackOverflowError.
+ assert(StubRoutines::throw_StackOverflowError_entry() != NULL, "stub not yet generated");
+ __ jump(ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));
  // all done with frame size check
  __ bind(after_frame_check_pop);
  __ pop(rsi);

@@ -467,8 +467,18 @@ void InterpreterGenerator::generate_stack_overflow_check(void) {
  __ cmpptr(rsp, rax);
  __ jcc(Assembler::above, after_frame_check);

- __ pop(rax); // get return address
- __ jump(ExternalAddress(Interpreter::throw_StackOverflowError_entry()));
+ // Restore sender's sp as SP. This is necessary if the sender's
+ // frame is an extended compiled frame (see gen_c2i_adapter())
+ // and safer anyway in case of JSR292 adaptations.
+
+ __ pop(rax); // return address must be moved if SP is changed
+ __ mov(rsp, r13);
+ __ push(rax);
+
+ // Note: the restored frame is not necessarily interpreted.
+ // Use the shared runtime version of the StackOverflowError.
+ assert(StubRoutines::throw_StackOverflowError_entry() != NULL, "stub not yet generated");
+ __ jump(ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));

  // all done with frame size check
  __ bind(after_frame_check);

@@ -3188,7 +3188,7 @@ void TemplateTable::_new() {

  // make sure klass is initialized & doesn't have finalizer
  // make sure klass is fully initialized
- __ cmpl(Address(rcx, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)), instanceKlass::fully_initialized);
+ __ cmpb(Address(rcx, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)), instanceKlass::fully_initialized);
  __ jcc(Assembler::notEqual, slow_case);

  // get instance_size in instanceKlass (scaled to a count of bytes)

@@ -3235,7 +3235,7 @@ void TemplateTable::_new() {

  // make sure klass is initialized & doesn't have finalizer
  // make sure klass is fully initialized
- __ cmpl(Address(rsi,
+ __ cmpb(Address(rsi,
  instanceKlass::init_state_offset_in_bytes() +
  sizeof(oopDesc)),
  instanceKlass::fully_initialized);
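The ld to ldub and cmpl to cmpb changes above all follow from _init_state shrinking to a single byte: the generated code has to load and compare exactly one byte, because a wider access at the same offset would also pick up whatever fields now share that word. A minimal standalone sketch of that pitfall, using hypothetical field names rather than the real instanceKlass layout:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Hypothetical layout: a one-byte state packed next to other small fields,
    // roughly analogous to instanceKlass::_init_state after this change.
    struct KlassStateSketch {
        uint8_t  init_state;   // e.g. fully_initialized == 4
        uint8_t  misc_flags;   // neighbouring byte
        uint16_t idnum;        // more neighbouring data
    };

    int main() {
        KlassStateSketch k = {4, 0x3, 7};

        // Correct: a one-byte load, which is what the ldub/cmpb code paths do.
        assert(k.init_state == 4);

        // Wrong: a four-byte load starting at the same address also pulls in
        // the neighbouring fields, so an equality test against 4 fails.
        uint32_t wide;
        std::memcpy(&wide, &k, sizeof(wide));
        assert(wide != 4);
        return 0;
    }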
@@ -54,7 +54,7 @@ ciInstanceKlass::ciInstanceKlass(KlassHandle h_k) :
  _flags = ciFlags(access_flags);
  _has_finalizer = access_flags.has_finalizer();
  _has_subklass = ik->subklass() != NULL;
- _init_state = (instanceKlass::ClassState)ik->get_init_state();
+ _init_state = ik->init_state();
  _nonstatic_field_size = ik->nonstatic_field_size();
  _has_nonstatic_fields = ik->has_nonstatic_fields();
  _nonstatic_fields = NULL; // initialized lazily by compute_nonstatic_fields:

@@ -118,7 +118,7 @@ ciInstanceKlass::ciInstanceKlass(ciSymbol* name,
  void ciInstanceKlass::compute_shared_init_state() {
  GUARDED_VM_ENTRY(
  instanceKlass* ik = get_instanceKlass();
- _init_state = (instanceKlass::ClassState)ik->get_init_state();
+ _init_state = ik->init_state();
  )
  }

@@ -1051,7 +1051,7 @@ static FieldAllocationType basic_type_to_atype(bool is_static, BasicType type) {

  class FieldAllocationCount: public ResourceObj {
  public:
- unsigned int count[MAX_FIELD_ALLOCATION_TYPE];
+ u2 count[MAX_FIELD_ALLOCATION_TYPE];

  FieldAllocationCount() {
  for (int i = 0; i < MAX_FIELD_ALLOCATION_TYPE; i++) {

@@ -1061,6 +1061,8 @@ class FieldAllocationCount: public ResourceObj {

  FieldAllocationType update(bool is_static, BasicType type) {
  FieldAllocationType atype = basic_type_to_atype(is_static, type);
+ // Make sure there is no overflow with injected fields.
+ assert(count[atype] < 0xFFFF, "More than 65535 fields");
  count[atype]++;
  return atype;
  }
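The FieldAllocationCount change narrows the per-type counters to u2 and guards the increment with an assert so the smaller type cannot silently wrap. The same guarded-increment idea, restated outside HotSpot with a plain uint16_t (hypothetical names; debug-build check only):

    #include <cassert>
    #include <cstdint>

    // Sketch of the guarded increment: the counter may not wrap once it is
    // only 16 bits wide.
    struct FieldCounterSketch {
        uint16_t count = 0;   // stands in for 'u2 count[MAX_FIELD_ALLOCATION_TYPE]'

        void update() {
            // Mirrors "Make sure there is no overflow with injected fields."
            assert(count < 0xFFFF && "More than 65535 fields");
            count++;
        }
    };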
@@ -1071,7 +1073,7 @@ typeArrayHandle ClassFileParser::parse_fields(Symbol* class_name,
  constantPoolHandle cp, bool is_interface,
  FieldAllocationCount *fac,
  objArrayHandle* fields_annotations,
- int* java_fields_count_ptr, TRAPS) {
+ u2* java_fields_count_ptr, TRAPS) {
  ClassFileStream* cfs = stream();
  typeArrayHandle nullHandle;
  cfs->guarantee_more(2, CHECK_(nullHandle)); // length

@@ -2861,7 +2863,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
  local_interfaces = parse_interfaces(cp, itfs_len, class_loader, protection_domain, _class_name, CHECK_(nullHandle));
  }

- int java_fields_count = 0;
+ u2 java_fields_count = 0;
  // Fields (offsets are filled in later)
  FieldAllocationCount fac;
  objArrayHandle fields_annotations;

@@ -91,7 +91,7 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
  constantPoolHandle cp, bool is_interface,
  FieldAllocationCount *fac,
  objArrayHandle* fields_annotations,
- int* java_fields_count_ptr, TRAPS);
+ u2* java_fields_count_ptr, TRAPS);

  // Method parsing
  methodHandle parse_method(constantPoolHandle cp, bool is_interface,

@@ -1631,7 +1631,7 @@ void KlassDepChange::initialize() {
  for (ContextStream str(*this); str.next(); ) {
  klassOop d = str.klass();
  assert(!instanceKlass::cast(d)->is_marked_dependent(), "checking");
- instanceKlass::cast(d)->set_is_marked_dependent(true);
+ instanceKlass::cast(d)->set_is_marked_dependent();
  }
  }

@@ -1640,7 +1640,7 @@ KlassDepChange::~KlassDepChange() {
  // Unmark transitive interfaces
  for (ContextStream str(*this); str.next(); ) {
  klassOop d = str.klass();
- instanceKlass::cast(d)->set_is_marked_dependent(false);
+ instanceKlass::cast(d)->clear_is_marked_dependent();
  }
  }

@@ -1402,7 +1402,7 @@ class LinkClassesClosure : public ObjectClosure {
  instanceKlass* ik = (instanceKlass*) k;
  // Link the class to cause the bytecodes to be rewritten and the
  // cpcache to be created.
- if (ik->get_init_state() < instanceKlass::linked) {
+ if (ik->init_state() < instanceKlass::linked) {
  ik->link_class(THREAD);
  guarantee(!HAS_PENDING_EXCEPTION, "exception in class rewriting");
  }

@@ -1535,7 +1535,7 @@ void GenCollectedHeap::preload_and_dump(TRAPS) {
  // are loaded in order that the related data structures (klass,
  // cpCache, Sting constants) are located together.

- if (ik->get_init_state() < instanceKlass::linked) {
+ if (ik->init_state() < instanceKlass::linked) {
  ik->link_class(THREAD);
  guarantee(!(HAS_PENDING_EXCEPTION), "exception in class rewriting");
  }
@@ -208,7 +208,7 @@ void instanceKlass::eager_initialize_impl(instanceKlassHandle this_oop) {
  // abort if someone beat us to the initialization
  if (!this_oop->is_not_initialized()) return; // note: not equivalent to is_initialized()

- ClassState old_state = this_oop->_init_state;
+ ClassState old_state = this_oop->init_state();
  link_class_impl(this_oop, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
  CLEAR_PENDING_EXCEPTION;

@@ -2479,7 +2479,7 @@ void instanceKlass::set_init_state(ClassState state) {
  bool good_state = as_klassOop()->is_shared() ? (_init_state <= state)
  : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
- _init_state = state;
+ _init_state = (u1)state;
  }
  #endif

@@ -227,16 +227,12 @@ class instanceKlass: public Klass {
  // (including inherited fields but after header_size()).
  int _nonstatic_field_size;
  int _static_field_size; // number words used by static fields (oop and non-oop) in this klass
- int _static_oop_field_count;// number of static oop fields in this klass
+ u2 _static_oop_field_count;// number of static oop fields in this klass
+ u2 _java_fields_count; // The number of declared Java fields
  int _nonstatic_oop_map_size;// size in words of nonstatic oop map blocks
- int _java_fields_count; // The number of declared Java fields
- bool _is_marked_dependent; // used for marking during flushing and deoptimization
- bool _rewritten; // methods rewritten.
- bool _has_nonstatic_fields; // for sizing with UseCompressedOops
- bool _should_verify_class; // allow caching of preverification
+
  u2 _minor_version; // minor version number of class file
  u2 _major_version; // major version number of class file
- ClassState _init_state; // state of class
  Thread* _init_thread; // Pointer to current thread doing initialization (to handle recusive initialization)
  int _vtable_len; // length of Java vtable (in words)
  int _itable_len; // length of Java itable (in words)

@@ -260,6 +256,24 @@ class instanceKlass: public Klass {
  JvmtiCachedClassFieldMap* _jvmti_cached_class_field_map; // JVMTI: used during heap iteration
  volatile u2 _idnum_allocated_count; // JNI/JVMTI: increments with the addition of methods, old ids don't change

+ // Class states are defined as ClassState (see above).
+ // Place the _init_state here to utilize the unused 2-byte after
+ // _idnum_allocated_count.
+ u1 _init_state; // state of class
+
+ // Compact the following four boolean flags into 1-bit each. These four flags
+ // were defined as separate boolean fields and each was 1-byte before. Since
+ // there are 2 bytes unused after the _idnum_allocated_count field, place the
+ // _misc_flags field after _idnum_allocated_count to utilize the unused bits
+ // and save total 4-bytes.
+ enum {
+   IS_MARKED_DEPENDENT  = 0x1, // used for marking during flushing and deoptimization
+   REWRITTEN            = 0x2, // methods rewritten.
+   HAS_NONSTATIC_FIELDS = 0x4, // for sizing with UseCompressedOops
+   SHOULD_VERIFY_CLASS  = 0x8  // allow caching of preverification
+ };
+ u1 _misc_flags;
+
  // embedded Java vtable follows here
  // embedded Java itables follows here
  // embedded static fields follows here

@@ -269,8 +283,14 @@ class instanceKlass: public Klass {
  friend class SystemDictionary;

  public:
- bool has_nonstatic_fields() const { return _has_nonstatic_fields; }
- void set_has_nonstatic_fields(bool b) { _has_nonstatic_fields = b; }
+ bool has_nonstatic_fields() const { return (_misc_flags & HAS_NONSTATIC_FIELDS) != 0; }
+ void set_has_nonstatic_fields(bool b) {
+   if (b) {
+     _misc_flags |= HAS_NONSTATIC_FIELDS;
+   } else {
+     _misc_flags &= ~HAS_NONSTATIC_FIELDS;
+   }
+ }

  // field sizes
  int nonstatic_field_size() const { return _nonstatic_field_size; }

@@ -279,8 +299,8 @@ class instanceKlass: public Klass {
  int static_field_size() const { return _static_field_size; }
  void set_static_field_size(int size) { _static_field_size = size; }

- int static_oop_field_count() const { return _static_oop_field_count; }
- void set_static_oop_field_count(int size) { _static_oop_field_count = size; }
+ int static_oop_field_count() const { return (int)_static_oop_field_count; }
+ void set_static_oop_field_count(u2 size) { _static_oop_field_count = size; }

  // Java vtable
  int vtable_length() const { return _vtable_len; }

@@ -320,14 +340,14 @@ class instanceKlass: public Klass {
  Symbol* field_signature (int index) const { return field(index)->signature(constants()); }

  // Number of Java declared fields
- int java_fields_count() const { return _java_fields_count; }
+ int java_fields_count() const { return (int)_java_fields_count; }

  // Number of fields including any injected fields
  int all_fields_count() const { return _fields->length() / sizeof(FieldInfo::field_slots); }

  typeArrayOop fields() const { return _fields; }

- void set_fields(typeArrayOop f, int java_fields_count) {
+ void set_fields(typeArrayOop f, u2 java_fields_count) {
  oop_store_without_check((oop*) &_fields, (oop) f);
  _java_fields_count = java_fields_count;
  }

@@ -377,16 +397,24 @@ class instanceKlass: public Klass {
  bool is_being_initialized() const { return _init_state == being_initialized; }
  bool is_in_error_state() const { return _init_state == initialization_error; }
  bool is_reentrant_initialization(Thread *thread) { return thread == _init_thread; }
- int get_init_state() { return _init_state; } // Useful for debugging
- bool is_rewritten() const { return _rewritten; }
+ ClassState init_state() { return (ClassState)_init_state; }
+ bool is_rewritten() const { return (_misc_flags & REWRITTEN) != 0; }

  // defineClass specified verification
- bool should_verify_class() const { return _should_verify_class; }
- void set_should_verify_class(bool value) { _should_verify_class = value; }
+ bool should_verify_class() const { return (_misc_flags & SHOULD_VERIFY_CLASS) != 0; }
+ void set_should_verify_class(bool value) {
+   if (value) {
+     _misc_flags |= SHOULD_VERIFY_CLASS;
+   } else {
+     _misc_flags &= ~SHOULD_VERIFY_CLASS;
+   }
+ }
+

  // marking
- bool is_marked_dependent() const { return _is_marked_dependent; }
- void set_is_marked_dependent(bool value) { _is_marked_dependent = value; }
+ bool is_marked_dependent() const { return (_misc_flags & IS_MARKED_DEPENDENT) != 0; }
+ void set_is_marked_dependent() { _misc_flags |= IS_MARKED_DEPENDENT; }
+ void clear_is_marked_dependent() { _misc_flags &= ~IS_MARKED_DEPENDENT; }

  // initialization (virtuals from Klass)
  bool should_be_initialized() const; // means that initialize should be called

@@ -754,9 +782,9 @@ private:
  #ifdef ASSERT
  void set_init_state(ClassState state);
  #else
- void set_init_state(ClassState state) { _init_state = state; }
+ void set_init_state(ClassState state) { _init_state = (u1)state; }
  #endif
- void set_rewritten() { _rewritten = true; }
+ void set_rewritten() { _misc_flags |= REWRITTEN; }
  void set_init_thread(Thread *thread) { _init_thread = thread; }

  u2 idnum_allocated_count() const { return _idnum_allocated_count; }
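The instanceKlass class-body hunks above replace four separate bool fields and the ClassState-typed _init_state with one u1 state byte plus a u1 bit-flag field. A minimal, self-contained restatement of the new accessor pattern (hypothetical class name, standard types; not the HotSpot declaration itself):

    #include <cstdint>

    // Four booleans packed into one byte, set/cleared/tested with bit masks.
    class MiscFlagsSketch {
    public:
        enum {
            IS_MARKED_DEPENDENT  = 0x1,
            REWRITTEN            = 0x2,
            HAS_NONSTATIC_FIELDS = 0x4,
            SHOULD_VERIFY_CLASS  = 0x8
        };

        bool is_marked_dependent() const { return (_misc_flags & IS_MARKED_DEPENDENT) != 0; }
        void set_is_marked_dependent()   { _misc_flags |= IS_MARKED_DEPENDENT; }
        void clear_is_marked_dependent() { _misc_flags &= ~IS_MARKED_DEPENDENT; }

        bool has_nonstatic_fields() const { return (_misc_flags & HAS_NONSTATIC_FIELDS) != 0; }
        void set_has_nonstatic_fields(bool b) {
            if (b) {
                _misc_flags |= HAS_NONSTATIC_FIELDS;
            } else {
                _misc_flags &= ~HAS_NONSTATIC_FIELDS;
            }
        }

    private:
        uint8_t _misc_flags = 0;   // plays the role of 'u1 _misc_flags'
    };

Together with moving the one-byte _init_state into the padding after _idnum_allocated_count, this is what the hunk's comment means by saving four bytes per class.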
@@ -399,7 +399,7 @@ instanceKlassKlass::allocate_instance_klass(Symbol* name, int vtable_len, int it
  ik->set_inner_classes(NULL);
  ik->set_static_oop_field_count(0);
  ik->set_nonstatic_field_size(0);
- ik->set_is_marked_dependent(false);
+ ik->clear_is_marked_dependent();
  ik->set_init_state(instanceKlass::allocated);
  ik->set_init_thread(NULL);
  ik->set_reference_type(rt);

@@ -2807,7 +2807,9 @@ bool LibraryCallKit::inline_unsafe_allocate() {
  // Serializable.class or Object[].class. The runtime will handle it.
  // But we must make an explicit check for initialization.
  Node* insp = basic_plus_adr(kls, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc));
- Node* inst = make_load(NULL, insp, TypeInt::INT, T_INT);
+ // Use T_BOOLEAN for instanceKlass::_init_state so the compiler
+ // can generate code to load it as unsigned byte.
+ Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN);
  Node* bits = intcon(instanceKlass::fully_initialized);
  Node* test = _gvn.transform( new (C, 3) SubINode(inst, bits) );
  // The 'test' is non-zero if we need to take a slow path.

@@ -230,7 +230,9 @@ void Parse::emit_guard_for_new(ciInstanceKlass* klass) {

  Node* init_state_offset = _gvn.MakeConX(instanceKlass::init_state_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes());
  adr_node = basic_plus_adr(kls, kls, init_state_offset);
- Node* init_state = make_load(NULL, adr_node, TypeInt::INT, T_INT);
+ // Use T_BOOLEAN for instanceKlass::_init_state so the compiler
+ // can generate code to load it as unsigned byte.
+ Node* init_state = make_load(NULL, adr_node, TypeInt::UBYTE, T_BOOLEAN);
  Node* being_init = _gvn.intcon(instanceKlass::being_initialized);
  tst = Bool( CmpI( init_state, being_init), BoolTest::eq);
  iff = create_and_map_if(control(), tst, PROB_ALWAYS, COUNT_UNKNOWN);
@@ -527,7 +527,21 @@ void Monitor::ILock (Thread * Self) {

  void Monitor::IUnlock (bool RelaxAssert) {
  assert (ILocked(), "invariant") ;
- _LockWord.Bytes[_LSBINDEX] = 0 ; // drop outer lock
+ // Conceptually we need a MEMBAR #storestore|#loadstore barrier or fence immediately
+ // before the store that releases the lock. Crucially, all the stores and loads in the
+ // critical section must be globally visible before the store of 0 into the lock-word
+ // that releases the lock becomes globally visible. That is, memory accesses in the
+ // critical section should not be allowed to bypass or overtake the following ST that
+ // releases the lock. As such, to prevent accesses within the critical section
+ // from "leaking" out, we need a release fence between the critical section and the
+ // store that releases the lock. In practice that release barrier is elided on
+ // platforms with strong memory models such as TSO.
+ //
+ // Note that the OrderAccess::storeload() fence that appears after unlock store
+ // provides for progress conditions and succession and is _not related to exclusion
+ // safety or lock release consistency.
+ OrderAccess::release_store(&_LockWord.Bytes[_LSBINDEX], 0); // drop outer lock
+
  OrderAccess::storeload ();
  ParkEvent * const w = _OnDeck ;
  assert (RelaxAssert || w != Thread::current()->_MutexEvent, "invariant") ;
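The Monitor::IUnlock hunk replaces a plain byte store with OrderAccess::release_store, for the reasons spelled out in the new comment. The same ordering requirement, sketched with C++11 atomics instead of HotSpot's OrderAccess API (a simplified toy lock under those assumptions, not the Monitor implementation):

    #include <atomic>
    #include <cstdint>

    // Toy spinlock illustrating why the unlock store needs release semantics:
    // everything written in the critical section must become visible before
    // the cleared lock byte does.
    struct TinyLock {
        std::atomic<uint8_t> lock_byte{0};
        int protected_data = 0;

        void lock() {
            uint8_t expected = 0;
            while (!lock_byte.compare_exchange_weak(expected, 1,
                                                    std::memory_order_acquire)) {
                expected = 0;   // lost the race; reset and retry
            }
        }

        void unlock() {
            // A relaxed store here could let writes to protected_data drift
            // past the unlock; release ordering (the analogue of the patch's
            // OrderAccess::release_store) forbids that reordering.
            lock_byte.store(0, std::memory_order_release);
        }
    };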
@@ -295,7 +295,7 @@ static inline uint64_t cast_uint64_t(size_t x)
  nonstatic_field(instanceKlass, _nof_implementors, int) \
  nonstatic_field(instanceKlass, _implementors[0], klassOop) \
  nonstatic_field(instanceKlass, _fields, typeArrayOop) \
- nonstatic_field(instanceKlass, _java_fields_count, int) \
+ nonstatic_field(instanceKlass, _java_fields_count, u2) \
  nonstatic_field(instanceKlass, _constants, constantPoolOop) \
  nonstatic_field(instanceKlass, _class_loader, oop) \
  nonstatic_field(instanceKlass, _protection_domain, oop) \

@@ -305,12 +305,12 @@ static inline uint64_t cast_uint64_t(size_t x)
  nonstatic_field(instanceKlass, _inner_classes, typeArrayOop) \
  nonstatic_field(instanceKlass, _nonstatic_field_size, int) \
  nonstatic_field(instanceKlass, _static_field_size, int) \
- nonstatic_field(instanceKlass, _static_oop_field_count, int) \
+ nonstatic_field(instanceKlass, _static_oop_field_count, u2) \
  nonstatic_field(instanceKlass, _nonstatic_oop_map_size, int) \
- nonstatic_field(instanceKlass, _is_marked_dependent, bool) \
+ nonstatic_field(instanceKlass, _misc_flags, u1) \
  nonstatic_field(instanceKlass, _minor_version, u2) \
  nonstatic_field(instanceKlass, _major_version, u2) \
- nonstatic_field(instanceKlass, _init_state, instanceKlass::ClassState) \
+ nonstatic_field(instanceKlass, _init_state, u1) \
  nonstatic_field(instanceKlass, _init_thread, Thread*) \
  nonstatic_field(instanceKlass, _vtable_len, int) \
  nonstatic_field(instanceKlass, _itable_len, int) \

@@ -1362,6 +1362,7 @@ static inline uint64_t cast_uint64_t(size_t x)
  /* The compiler thinks this is a different type than */ \
  /* unsigned short on Win32 */ \
  declare_unsigned_integer_type(u2) \
+ declare_unsigned_integer_type(u1) \
  declare_unsigned_integer_type(unsigned) \
  \
  /*****************************/ \

@@ -2385,6 +2386,7 @@ static inline uint64_t cast_uint64_t(size_t x)
  declare_constant(instanceKlass::being_initialized) \
  declare_constant(instanceKlass::fully_initialized) \
  declare_constant(instanceKlass::initialization_error) \
+ declare_constant(instanceKlass::IS_MARKED_DEPENDENT) \
  \
  /*********************************/ \
  /* Symbol* - symbol max length */ \
hotspot/test/compiler/7116216/LargeFrame.java    | 1329 lines (new file; diff suppressed because it is too large)
hotspot/test/compiler/7116216/StackOverflow.java |   64 lines (new file)
hotspot/test/compiler/7116216/StackOverflow.java (new file)
@@ -0,0 +1,64 @@
+ /*
+  * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
+  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+  *
+  * This code is free software; you can redistribute it and/or modify it
+  * under the terms of the GNU General Public License version 2 only, as
+  * published by the Free Software Foundation.
+  *
+  * This code is distributed in the hope that it will be useful, but WITHOUT
+  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+  * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+  * version 2 for more details (a copy is included in the LICENSE file that
+  * accompanied this code).
+  *
+  * You should have received a copy of the GNU General Public License version
+  * 2 along with this work; if not, write to the Free Software Foundation,
+  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+  *
+  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+  * or visit www.oracle.com if you need additional information or have any
+  * questions.
+  *
+  */
+
+ /**
+  * @test
+  * @bug 7116216
+  * @summary The vm crashes when GC happens during throwing a StackOverflow exception
+  *
+  * @run main/othervm -Xcomp -Xbatch StackOverflow
+  */
+
+ class StackOverflow {
+     static String stackOverflow_largeFrame_liveOopForGC;
+
+     public static int stackOverflow_largeFrame(int call_count, String liveOopForGC) {
+         try {
+             int return_count = stackOverflow_largeFrame(++call_count, liveOopForGC);
+             if (return_count == 0) {
+                 try {
+                     LargeFrame.method_with_many_locals(liveOopForGC, 2,3,4,5,6,7,liveOopForGC);
+                 } catch (StackOverflowError e2) {
+                     // access liveOopForGC to make it a live variable
+                     stackOverflow_largeFrame_liveOopForGC = liveOopForGC;
+                 }
+             }
+             return return_count - 1;
+         } catch (StackOverflowError e) {
+             // Return a value that is large enough such that no unrecoverable
+             // stack overflow will occur afterwards, but that is small enough
+             // such that calling LargeFrame.method_with_many_locals() will
+             // cause a StackOverflowError.
+             // Don't use a call here because we're out of stack space anyway!
+             int tmp = call_count / 2;
+             return (tmp < 100 ? tmp : 100);
+         }
+     }
+     public static void main(String args[]) {
+         LargeFrame.method_with_many_locals(new Object(), 2,3,4,5,6,7,new Object());
+
+         stackOverflow_largeFrame(0, "this is a live oop to test GC");
+         System.out.println("finished ok!");
+     }
+ }