J. Duke 2017-07-05 17:24:54 +02:00
commit 99b68847cf
155 changed files with 3120 additions and 5794 deletions

View File

@@ -1,3 +1,4 @@
^build/
^dist/
/nbproject/private/
+^webrev

View File

@@ -88,3 +88,4 @@ f8be576feefce0c6695f188ef97ec16b73ad9cfd jdk7-b104
9702d6fef68e17533ee7fcf5923b11ead3e912ce jdk7-b111
b852103caf73da70068473777ae867a457bb3ae1 jdk7-b112
c1df968c4527bfab5f97662a89245f15d12d378b jdk7-b113
+27985a5c6e5268014d25d55886e0ecb96af4763d jdk7-b114

View File

@@ -88,3 +88,4 @@ c3dd858e09b20206459d9e7b0ead99d27ab00eab jdk7-b109
640fa4d4e2ad4c2d7e4815c955026740d8c52b7a jdk7-b111
cc67fdc4fee9a5b25caee4e71b51a8ff24ae7d1a jdk7-b112
a89a6c5be9d1a754868d3d359cbf7ad36aa95631 jdk7-b113
+88fddb73c5c4a4b50c319cbae9380caf5172ab45 jdk7-b114

View File

@@ -123,3 +123,5 @@ cc4bb3022b3144dc5db0805b9ef6c7eff2aa3b81 jdk7-b109
07b042e13dde4f3479ba9ec55120fcd5e8623323 jdk7-b111
5511edd5d719f3fc9fdd04879482026a3d2c8652 jdk7-b112
beef35b96b81129c375d572357fb9548d9020db1 jdk7-b113
+68d6141ea19de3a9ba98ef753f0da41a61f736a0 jdk7-b114
+5511edd5d719f3fc9fdd04879482026a3d2c8652 hs20-b01

View File

@@ -35,7 +35,7 @@ HOTSPOT_VM_COPYRIGHT=Copyright 2010
HS_MAJOR_VER=20
HS_MINOR_VER=0
-HS_BUILD_NUMBER=01
+HS_BUILD_NUMBER=02
JDK_MAJOR_VER=1
JDK_MINOR_VER=7

View File

@@ -15,5 +15,6 @@ fix_lines() {
' F2=$2
mv $1+ $1
}
-[ -f $3/$1 ] && (fix_lines $2/$1 $3/$1; cmp -s $2/$1 $3/$1) || \
+fix_lines $2/$1 $3/$1
+[ -f $3/$1 ] && cmp -s $2/$1 $3/$1 || \
( [ -f $3/$1 ] && echo Updating $3/$1 ; touch $2/made-change ; mv $2/$1 $3/$1 )

View File

@@ -15,5 +15,6 @@ fix_lines() {
' F2=$2
mv $1+ $1
}
-[ -f $3/$1 ] && (fix_lines $2/$1 $3/$1; cmp -s $2/$1 $3/$1) || \
+fix_lines $2/$1 $3/$1
+[ -f $3/$1 ] && cmp -s $2/$1 $3/$1 || \
( [ -f $3/$1 ] && echo Updating $3/$1 ; touch $2/made-change ; mv $2/$1 $3/$1 )

View File

@@ -35,7 +35,8 @@ ifeq ("${Platform_compiler}", "sparcWorks")
# Temporary until SS10 C++ compiler is fixed
OPT_CFLAGS/generateOptoStub.o = -xO2
+# Temporary util SS12u1 C++ compiler is fixed
+OPT_CFLAGS/c1_LinearScan.o = -xO2
else
ifeq ("${Platform_compiler}", "gcc")

View File

@@ -18,7 +18,6 @@ text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: compilationPolicy.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: compileBroker.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: fprofiler.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: genCollectedHeap.o;
-text: .text%__1cTAssertIsPermClosure2t6M_v_: genCollectedHeap.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: generateOopMap.o;
text: .text%__1cNCellTypeStateLmake_bottom6F_0_: generateOopMap.o;
text: .text%__1cNCellTypeStateImake_any6Fi_0_: generateOopMap.o;
@@ -34,23 +33,15 @@ text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: jvmtiTagMap.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: management.o;
text: .text%__1cJTimeStamp2t6M_v_: management.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: markSweep.o;
-text: .text%__1cJMarkSweepSMarkAndPushClosure2t6M_v_: markSweep.o;
-text: .text%__1cJMarkSweepRFollowRootClosure2t6M_v_: markSweep.o;
-text: .text%__1cJMarkSweepSFollowStackClosure2t6M_v_: markSweep.o;
-text: .text%__1cJMarkSweepOIsAliveClosure2t6M_v_: markSweep.o;
-text: .text%__1cJMarkSweepQKeepAliveClosure2t6M_v_: markSweep.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: memoryService.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: oopMap.o;
-text: .text%__1cQDoNothingClosure2t6M_v_: oopMap.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: parGCAllocBuffer.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: psMarkSweep.o;
-text: .text%__1cTPSAlwaysTrueClosure2t6M_v_: psMarkSweep.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: psScavenge.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: relocInfo.o;
text: .text%__1cQRelocationHolder2t6M_v_: relocInfo.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: runtimeService.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: sharedHeap.o;
-text: .text%__1cTAssertIsPermClosure2t6M_v_: sharedHeap.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: vmStructs.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: vm_version.o;
text: .text%__1cTAbstract_VM_VersionKvm_release6F_pkc_;
@@ -94,7 +85,6 @@ text: .text%__1cCosXnon_memory_address_word6F_pc_;
text: .text%__1cCosHSolarisOlibthread_init6F_v_;
text: .text%__1cRlwp_priocntl_init6F_i_: os_solaris.o;
text: .text%__1cNpriocntl_stub6FinGidtype_lipc_l_: os_solaris.o;
-text: .text%__1cOresolve_symbol6Fpkc_pC_: os_solaris.o;
text: .text%__1cCosHSolarisQsignal_sets_init6F_v_;
text: .text%__1cCosHSolarisPinit_signal_mem6F_v_;
text: .text%__1cCosHSolarisXinstall_signal_handlers6F_v_;
@@ -107,7 +97,6 @@ text: .text%__1cSThreadLocalStorageEinit6F_v_;
text: .text%__1cSThreadLocalStorageHpd_init6F_v_;
text: .text%__1cCosbDallocate_thread_local_storage6F_i_;
text: .text%__1cSThreadLocalStoragebCgenerate_code_for_get_thread6F_v_;
-text: .text%__1cRAllocateTLSOffset6F_v_: threadLS_solaris_x86.o;
text: .text%__1cPvm_init_globals6F_v_;
text: .text%__1cScheck_ThreadShadow6F_v_;
text: .text%__1cNeventlog_init6F_v_;
@@ -119,17 +108,8 @@ text: .text%__1cPperfMemory_init6F_v_;
text: .text%__1cKPerfMemoryKinitialize6F_v_;
text: .text%__1cCosZvm_allocation_granularity6F_i_;
text: .text%__1cKPerfMemoryUcreate_memory_region6FI_v_;
-text: .text%__1cUcreate_shared_memory6FI_pc_: perfMemory_solaris.o;
-text: .text%__1cSmmap_create_shared6FI_pc_: perfMemory_solaris.o;
text: .text%__1cCosScurrent_process_id6F_i_;
-text: .text%__1cNget_user_name6Fl_pc_: perfMemory_solaris.o;
-text: .text%__1cQget_user_tmp_dir6Fpkc_pc_: perfMemory_solaris.o;
text: .text%__1cCosSget_temp_directory6F_pkc_;
-text: .text%__1cWget_sharedmem_filename6Fpkci_pc_: perfMemory_solaris.o;
-text: .text%__1cbBcleanup_sharedmem_resources6Fpkc_v_: perfMemory_solaris.o;
-text: .text%lstat: perfMemory_solaris.o;
-text: .text%__1cPfilename_to_pid6Fpkc_l_: perfMemory_solaris.o;
-text: .text%__1cbAcreate_sharedmem_resources6Fpkc1I_i_: perfMemory_solaris.o;
text: .text%__1cGThread2t6M_v_;
text: .text%__1cFArena2t6M_v_;
text: .text%__1cFChunk2n6FII_pv_;
@@ -161,7 +141,6 @@ text: .text%__1cCosRinitialize_thread6F_v_;
text: .text%__1cNReservedSpaceUpage_align_size_down6FI_I_;
text: .text%__1cCosHSolarisVinit_thread_fpu_state6F_v_;
text: .text%__1cOJNIHandleBlockOallocate_block6FpnGThread__p0_;
-text: .text%__1cQcreate_os_thread6FpnGThread_I_pnIOSThread__: os_solaris.o;
text: .text%__1cIOSThread2t6MpFpv_i1_v_;
text: .text%__1cIOSThreadNpd_initialize6M_v_;
text: .text%__1cCosHSolarisPhotspot_sigmask6FpnGThread__v_;
@@ -186,7 +165,6 @@ text: .text%__1cNThreadServiceEinit6F_v_;
text: .text%__1cPPerfDataManagerTcreate_long_counter6FnJCounterNS_pkcnIPerfDataFUnits_xpnGThread__pnPPerfLongCounter__;
text: .text%__1cORuntimeServiceEinit6F_v_;
text: .text%__1cTClassLoadingServiceEinit6F_v_;
-text: .text%__1cKvtune_init6F_v_;
text: .text%__1cObytecodes_init6F_v_;
text: .text%__1cJBytecodesKinitialize6F_v_;
text: .text%__1cJBytecodesNpd_initialize6F_v_;
@@ -200,7 +178,6 @@ text: .text%__1cKHandleMark2T6M_v_;
text: .text%__1cLClassLoaderbBsetup_bootstrap_search_path6F_v_;
text: .text%__1cCosGstrdup6Fpkc_pc_;
text: .text%__1cCosEstat6FpkcpnEstat__i_;
-text: .text%stat: os_solaris.o;
text: .text%JVM_RawMonitorCreate;
text: .text%JVM_NativePath;
text: .text%JVM_RawMonitorEnter;
@@ -216,7 +193,6 @@ text: .text%__1cNReservedSpace2t6MI_v_;
text: .text%__1cMVirtualSpaceQuncommitted_size6kM_I_;
text: .text%__1cMVirtualSpaceNreserved_size6kM_I_;
text: .text%__1cMVirtualSpaceOcommitted_size6kM_I_;
-text: .text%__1cSalign_to_page_size6FI_I_: heap.o;
text: .text%__1cICodeHeapFclear6M_v_;
text: .text%__1cICodeHeapTmark_segmap_as_free6MII_v_;
text: .text%__1cNMemoryServiceZadd_code_heap_memory_pool6FpnICodeHeap__v_;
@@ -244,9 +220,7 @@ text: .text%__1cKMemoryPoolYrecord_peak_memory_usage6M_v_;
text: .text%__1cMCodeHeapPoolQget_memory_usage6M_nLMemoryUsage__;
text: .text%__1cMCodeHeapPoolNused_in_bytes6M_I_: memoryPool.o;
text: .text%__1cICodeHeapSallocated_capacity6kM_I_;
-text: .text%__1cKMemoryPoolImax_size6kM_I_: memoryPool.o;
text: .text%__1cXresource_allocate_bytes6FI_pc_;
-text: .text%__1cKCodeBuffer2t6MpCi_v_;
text: .text%__1cRAbstractAssembler2t6MpnKCodeBuffer__v_;
text: .text%__1cYVM_Version_StubGeneratorTgenerate_getPsrInfo6M_pC_: vm_version_x86.o;
text: .text%__1cMStubCodeMark2t6MpnRStubCodeGenerator_pkc4_v_;
@@ -269,10 +243,8 @@ text: .text%__1cJAssemblerDret6Mi_v_;
text: .text%__1cMStubCodeMark2T6M_v_;
text: .text%__1cRAbstractAssemblerFflush6M_v_;
text: .text%__1cRStubCodeGeneratorLstub_epilog6MpnMStubCodeDesc__v_;
-text: .text%__1cFVTuneNregister_stub6FpkcpC3_v_;
text: .text%__1cFForteNregister_stub6FpkcpC3_v_;
text: .text%__1cKVM_VersionWget_processor_features6F_v_;
-text: .text%__1cVcheck_for_sse_support6F_v_: os_solaris_x86.o;
text: .text%jio_snprintf;
text: .text%jio_vsnprintf;
text: .text%__1cPlocal_vsnprintf6FpcIpkcpv_i_;
@@ -651,7 +623,6 @@ text: .text%__1cZInterpreterMacroAssemblerWprofile_switch_default6MpnMRegisterIm
text: .text%__1cNTemplateTableMlookupswitch6F_v_;
text: .text%__1cNTemplateTableH_return6FnITosState__v_;
text: .text%__1cNTemplateTableJgetstatic6Fi_v_;
-text: .text%__1cNTemplateTableXresolve_cache_and_index6FipnMRegisterImpl_2_v_;
text: .text%__1cJAssemblerHfistp_d6MnHAddress__v_;
text: .text%__1cNTemplateTableJputstatic6Fi_v_;
text: .text%__1cOMacroAssemblerLstore_check6MpnMRegisterImpl_nHAddress__v_;
@@ -735,7 +706,6 @@ text: .text%__1cbBBlockOffsetArrayContigSpaceQalloc_block_work6MpnIHeapWord_2_v_
text: .text%__1cLsymbolKlassMcreate_klass6FpnGThread__pnMklassOopDesc__;
text: .text%__1cLsymbolKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: symbolKlass.o;
text: .text%__1cLsymbolKlassOset_alloc_size6MI_v_: symbolKlass.o;
-text: .text%__1cKoopFactoryKnew_symbol6FpkcipnGThread__pnNsymbolOopDesc__;
text: .text%__1cLSymbolTableGlookup6FpkcipnGThread__pnNsymbolOopDesc__;
text: .text%__1cJHashtableLhash_symbol6Fpkci_I_: symbolTable.o;
text: .text%__1cLSymbolTableGlookup6MipkciI_pnNsymbolOopDesc__;
@@ -806,7 +776,6 @@ text: .text%__1cQSystemDictionaryRload_shared_class6FnMsymbolHandle_nGHandle_pnG
text: .text%__1cQSystemDictionaryRfind_shared_class6FnMsymbolHandle__pnMklassOopDesc__;
text: .text%__1cQSystemDictionaryRload_shared_class6FnTinstanceKlassHandle_nGHandle_pnGThread__1_;
text: .text%__1cLClassLoaderOload_classfile6FnMsymbolHandle_pnGThread__nTinstanceKlassHandle__;
-text: .text%__1cFVTuneQstart_class_load6F_v_;
text: .text%__1cJEventMark2t6MpkcE_v_: classLoader.o;
text: .text%__1cSThreadProfilerMark2t6Mn0AGRegion__v_;
text: .text%__1cMstringStream2t6MI_v_;
@@ -854,7 +823,6 @@ text: .text%__1cPClassFileParserYparse_checked_exceptions6MpHInSconstantPoolHand
text: .text%__1cPClassFileStreamHskip_u26MipnGThread__v_;
text: .text%__1cSconstMethodOopDescbEchecked_exceptions_length_addr6kM_pH_;
text: .text%__1cSconstMethodOopDescYchecked_exceptions_start6kM_pnXCheckedExceptionElement__;
-text: .text%__1cXcopy_u2_with_conversion6FpH0i_v_: classFileParser.o;
text: .text%__1cPClassFileParserbDcompute_transitive_interfaces6MnTinstanceKlassHandle_nOobjArrayHandle_pnGThread__2_;
text: .text%__1cPClassFileParserMsort_methods6MnOobjArrayHandle_111pnGThread__nPtypeArrayHandle__;
text: .text%method_compare: methodOop.o;
@@ -862,9 +830,6 @@ text: .text%__1cLklassVtableQget_num_mirandas6FpnMklassOopDesc_pnPobjArrayOopDes
text: .text%__1cLklassVtableMget_mirandas6FpnNGrowableArray4CpnNmethodOopDesc___pnMklassOopDesc_pnPobjArrayOopDesc_8_v_;
text: .text%__1cLklassItableTcompute_itable_size6FnOobjArrayHandle__i_;
text: .text%__1cUvisit_all_interfaces6FpnPobjArrayOopDesc_pnXInterfaceVisiterClosure__v_;
-text: .text%__1cPClassFileParserUcompute_oop_map_size6MnTinstanceKlassHandle_ii_i_;
-text: .text%__1cKoopFactoryRnew_instanceKlass6FiiiinNReferenceType_pnGThread__pnMklassOopDesc__;
-text: .text%__1cSinstanceKlassKlassXallocate_instance_klass6MiiiinNReferenceType_pnGThread__pnMklassOopDesc__;
text: .text%__1cNinstanceKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceKlass.o;
text: .text%__1cNinstanceKlassOset_alloc_size6MI_v_: instanceKlass.o;
text: .text%__1cNinstanceKlassQinit_implementor6M_v_;
@@ -878,7 +843,6 @@ text: .text%__1cNinstanceKlassYcompute_secondary_supers6MipnGThread__pnPobjArray
text: .text%__1cLklassItableZsetup_itable_offset_table6FnTinstanceKlassHandle__v_;
text: .text%__1cFKlassKsuperklass6kM_pnNinstanceKlass__;
text: .text%__1cPClassFileParserVset_precomputed_flags6MnTinstanceKlassHandle__v_;
-text: .text%__1cFKlassNlookup_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cNinstanceKlassWuncached_lookup_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cNinstanceKlassLfind_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cNinstanceKlassLfind_method6FpnPobjArrayOopDesc_pnNsymbolOopDesc_4_pnNmethodOopDesc__;
@@ -893,7 +857,6 @@ text: .text%__1cQPackageHashtableMcompute_hash6Mpkci_I_: classLoader.o;
text: .text%__1cQPackageHashtableJget_entry6MiIpkcI_pnLPackageInfo__: classLoader.o;
text: .text%__1cMstringStream2T6M_v_;
text: .text%__1cSThreadProfilerMark2T6M_v_;
-text: .text%__1cFVTuneOend_class_load6F_v_;
text: .text%__1cQSystemDictionaryVdefine_instance_class6FnTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cVLoaderConstraintTableWfind_loader_constraint6MnMsymbolHandle_nGHandle__ppnVLoaderConstraintEntry__;
text: .text%__1cQSystemDictionaryQadd_to_hierarchy6FnTinstanceKlassHandle_pnGThread__v_;
@@ -970,8 +933,6 @@ text: .text%__1cbBjava_lang_ref_SoftReferenceJset_clock6Fx_v_;
text: .text%__1cQjni_handles_init6F_v_;
text: .text%__1cKJNIHandlesKinitialize6F_v_;
text: .text%__1cOvmStructs_init6F_v_;
-text: .text%__1cIFrameMapEinit6F_v_;
-text: .text%__1cIRuntime1Kinitialize6F_v_;
text: .text%__1cIRuntime1Ninitialize_pd6F_v_;
text: .text%__1cNSharedRuntimeTgenerate_deopt_blob6F_v_;
text: .text%__1cJOopMapSet2t6M_v_;
@@ -996,7 +957,6 @@ text: .text%__1cJOopMapSetJheap_size6kM_i_;
text: .text%__1cGOopMapJheap_size6kM_i_;
text: .text%__1cJOopMapSetHcopy_to6MpC_v_;
text: .text%__1cGOopMapHcopy_to6MpC_v_;
-text: .text%__1cIRuntime1Rgenerate_blob_for6Fn0AGStubID__v_;
text: .text%__1cLOopRecorder2t6MpnFArena__v_;
text: .text%__1cIRuntime1Iname_for6Fn0AGStubID__pkc_;
text: .text%__1cLRuntimeStub2n6FII_pv_;
@@ -1004,7 +964,6 @@ text: .text%__1cNStubAssemblerHcall_RT6MpnMRegisterImpl_2pC2_i_;
text: .text%__1cNStubAssemblerHcall_RT6MpnMRegisterImpl_2pCi_i_;
text: .text%__1cJStubFrame2T6M_v_;
text: .text%__1cOMacroAssemblerLtlab_refill6MrnFLabel_22_v_;
-text: .text%__1cLlog2_intptr6Fi_i_: assembler_x86.o;
text: .text%__1cOMacroAssemblerNeden_allocate6MpnMRegisterImpl_2i2rnFLabel__v_;
text: .text%__1cOMacroAssemblerLverify_tlab6M_v_;
text: .text%__1cOMacroAssemblerNtlab_allocate6MpnMRegisterImpl_2i22rnFLabel__v_;
@@ -1033,7 +992,6 @@ text: .text%__1cPICStubInterfaceRcode_size_to_size6kMi_i_: icBuffer.o;
text: .text%__1cPICStubInterfaceKinitialize6MpnEStub_i_v_: icBuffer.o;
text: .text%__1cTcompilerOracle_init6F_v_;
text: .text%__1cOCompilerOraclePparse_from_file6F_v_;
-text: .text%__1cHcc_file6F_pkc_: compilerOracle.o;
text: .text%__1cWcompilationPolicy_init6F_v_;
text: .text%__1cNinstanceKlassGvtable6kM_pnLklassVtable__;
text: .text%__1cNinstanceKlassKjava_super6kM_pnMklassOopDesc__: instanceKlass.o;
@@ -1076,18 +1034,14 @@ text: .text%__1cNinstanceKlassKlink_class6MpnGThread__v_;
text: .text%__1cNinstanceKlassNrewrite_class6MpnGThread__v_;
text: .text%__1cIRewriterHrewrite6FnTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cYconstantPoolCacheOopDescKinitialize6MrnIintArray__v_;
-text: .text%__1cWConstantPoolCacheEntryRset_initial_state6Mi_v_;
text: .text%__1cTAbstractInterpreterLmethod_kind6FnMmethodHandle__n0AKMethodKind__;
text: .text%__1cMNativeLookupTbase_library_lookup6Fpkc22_pC_;
text: .text%__1cMNativeLookupNpure_jni_name6FnMmethodHandle__pc_;
text: .text%__1cMoutputStreamFprint6MpkcE_v_;
text: .text%__1cNmethodOopDescKklass_name6kM_pnNsymbolOopDesc__;
-text: .text%__1cOmangle_name_on6FpnMoutputStream_pnNsymbolOopDesc__v_: nativeLookup.o;
-text: .text%__1cOmangle_name_on6FpnMoutputStream_pnNsymbolOopDesc_ii_v_: nativeLookup.o;
text: .text%__1cMoutputStreamDput6Mc_v_;
text: .text%__1cCosYprint_jni_name_prefix_on6FpnMoutputStream_i_v_;
text: .text%__1cCosYprint_jni_name_suffix_on6FpnMoutputStream_i_v_;
-text: .text%__1cVlookup_special_native6Fpc_pC_: nativeLookup.o;
text: .text%__1cbEinitialize_converter_functions6F_v_;
text: .text%__1cIUniverseWupdate_heap_info_at_gc6F_v_;
text: .text%__1cQGenCollectedHeapIcapacity6kM_I_;
@@ -1137,7 +1091,6 @@ text: .text%__1cCosMstart_thread6FpnGThread__v_;
text: .text%__1cCosPpd_start_thread6FpnGThread__v_;
text: .text%__1cCosTset_native_priority6FpnGThread_i_nIOSReturn__;
text: .text%__1cQset_lwp_priority6Fiii_i_;
-text: .text%__1cVscale_to_lwp_priority6Fiii_i_: os_solaris.o;
text: .text%__1cIVMThreadDrun6M_v_;
text: .text%__1cIVMThreadEloop6M_v_;
text: .text%__1cQVMOperationQdDueueLremove_next6M_pnMVM_Operation__;
@@ -1154,7 +1107,6 @@ text: .text%__1cNinstanceKlassRclass_initializer6M_pnNmethodOopDesc__;
text: .text%__1cJJavaCallsEcall6FpnJJavaValue_nMmethodHandle_pnRJavaCallArguments_pnGThread__v_;
text: .text%__1cCosUos_exception_wrapper6FpFpnJJavaValue_pnMmethodHandle_pnRJavaCallArguments_pnGThread__v2468_v_;
text: .text%__1cJJavaCallsLcall_helper6FpnJJavaValue_pnMmethodHandle_pnRJavaCallArguments_pnGThread__v_;
-text: .text%__1cRruntime_type_from6FpnJJavaValue__nJBasicType__: javaCalls.o;
text: .text%__1cTAbstractInterpreterbFsize_top_interpreter_activation6FpnNmethodOopDesc__i_;
text: .text%__1cPJavaCallWrapper2t6MnMmethodHandle_nGHandle_pnJJavaValue_pnGThread__v_;
text: .text%__1cRJavaCallArgumentsKparameters6M_pi_;
@@ -1192,7 +1144,6 @@ text: .text%__1cXSignatureHandlerLibraryLset_handler6FpnKCodeBuffer__pC_;
text: .text%__1cXSignatureHandlerLibraryOpd_set_handler6FpC_v_;
text: .text%jni_RegisterNatives: jni.o;
text: .text%__1cPjava_lang_ClassLas_klassOop6FpnHoopDesc__pnMklassOopDesc__;
-text: .text%__1cLSymbolTableFprobe6Fpkci_pnNsymbolOopDesc__;
text: .text%__1cPJavaCallWrapper2T6M_v_;
text: .text%__1cOJNIHandleBlockNrelease_block6Fp0pnGThread__v_;
text: .text%__1cNinstanceKlassbJset_initialization_state_and_notify6Mn0AKClassState_pnGThread__v_;
@@ -1206,7 +1157,6 @@ text: .text%__1cPFieldAccessInfoDset6MnLKlassHandle_nMsymbolHandle_iinJBasicType
text: .text%__1cWConstantPoolCacheEntryOset_bytecode_26MnJBytecodesECode__v_;
text: .text%__1cSInterpreterRuntimeE_new6FpnKJavaThread_pnTconstantPoolOopDesc_i_v_;
text: .text%__1cMLinkResolverVresolve_invokespecial6FrnICallInfo_nSconstantPoolHandle_ipnGThread__v_;
-text: .text%__1cWConstantPoolCacheEntryPbytecode_number6FnJBytecodesECode__i_: cpCacheOop.o;
text: .text%__1cNSignatureInfoJdo_object6Mii_v_: bytecode.o;
text: .text%__1cNSignatureInfoHdo_long6M_v_: bytecode.o;
text: .text%JVM_CurrentTimeMillis;
@@ -1225,8 +1175,6 @@ text: .text%__1cQjava_lang_StringbBcreate_tenured_from_unicode6FpHipnGThread__nG
text: .text%__1cKoopFactoryXnew_permanent_charArray6FipnGThread__pnQtypeArrayOopDesc__;
text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMsymbolHandle_533pnGThread__v_;
text: .text%__1cNmethodOopDescIbci_from6kMpC_i_;
-text: .text%__1cPBytecode_invokeJsignature6kM_pnNsymbolOopDesc__;
-text: .text%__1cPBytecode_invokeFindex6kM_i_;
text: .text%__1cNmethodOopDescIbcp_from6kMi_pC_;
text: .text%__1cFframebGinterpreter_callee_receiver_addr6MnMsymbolHandle__ppnHoopDesc__;
text: .text%__1cMLinkResolverVresolve_invokevirtual6FrnICallInfo_nGHandle_nSconstantPoolHandle_ipnGThread__v_;
@@ -1258,7 +1206,6 @@ text: .text%jni_GetStringUTFChars: jni.o;
text: .text%__1cQjava_lang_StringOas_utf8_string6FpnHoopDesc__pc_;
text: .text%__1cHUNICODEHas_utf86FpHi_pc_;
text: .text%__1cHUNICODELutf8_length6FpHi_i_;
-text: .text%__1cKutf8_write6FpCH_0_: utf8.o;
text: .text%JVM_FindPrimitiveClass;
text: .text%__1cJname2type6Fpkc_nJBasicType__;
text: .text%jni_ReleaseStringUTFChars;
@@ -1300,7 +1247,6 @@ text: .text%__1cKJavaThreadRthread_main_inner6M_v_;
text: .text%__1cMthread_entry6FpnKJavaThread_pnGThread__v_: jvm.o;
text: .text%__1cJJavaCallsMcall_virtual6FpnJJavaValue_nGHandle_nLKlassHandle_nMsymbolHandle_5pnGThread__v_;
text: .text%__1cJJavaCallsMcall_virtual6FpnJJavaValue_nLKlassHandle_nMsymbolHandle_4pnRJavaCallArguments_pnGThread__v_;
-text: .text%__1cIBytecodeIset_code6MnJBytecodesECode__v_;
text: .text%__1cNFingerprinterHdo_long6M_v_: dump.o;
text: .text%JVM_MonitorWait;
text: .text%__1cQjava_lang_ThreadRget_thread_status6FpnHoopDesc__n0AMThreadStatus__;
@@ -1314,23 +1260,19 @@ text: .text%__1cNmethodOopDescKjmethod_id6M_pnK_jmethodID__;
text: .text%jni_NewStringUTF: jni.o;
text: .text%jni_CallObjectMethod: jni.o;
text: .text%__1cRSignatureIterator2t6MpnGThread_pnNsymbolOopDesc__v_;
-text: .text%__1cUjni_invoke_nonstatic6FpnHJNIEnv__pnJJavaValue_pnI_jobject_nLJNICallType_pnK_jmethodID_pnSJNI_ArgumentPusher_pnGThread__v_: jni.o;
text: .text%__1cXJNI_ArgumentPusherVaArgHiterate6MX_v_: jni.o;
text: .text%__1cXJNI_ArgumentPusherVaArgKget_object6M_v_: jni.o;
text: .text%jni_ExceptionOccurred: jni.o;
-text: .text%__1cbAjni_check_async_exceptions6FpnKJavaThread__v_: jni.o;
text: .text%jni_DeleteLocalRef: jni.o;
text: .text%__1cOJNIHandleBlockRrebuild_free_list6M_v_;
text: .text%jni_EnsureLocalCapacity;
text: .text%jni_GetStaticMethodID: jni.o;
text: .text%jni_CallStaticObjectMethodV: jni.o;
-text: .text%__1cRjni_invoke_static6FpnHJNIEnv__pnJJavaValue_pnI_jobject_nLJNICallType_pnK_jmethodID_pnSJNI_ArgumentPusher_pnGThread__v_: jni.o;
text: .text%__1cMLinkResolverbHlookup_instance_method_in_klasses6FrnMmethodHandle_nLKlassHandle_nMsymbolHandle_4pnGThread__v_;
text: .text%jni_ExceptionCheck: jni.o;
text: .text%jni_NewString: jni.o;
text: .text%__1cQjava_lang_StringXcreate_oop_from_unicode6FpHipnGThread__pnHoopDesc__;
text: .text%JVM_InitProperties;
-text: .text%__1cMset_property6FnGHandle_pkc2pnGThread__v_: jvm.o;
text: .text%__1cJJavaCallsMcall_virtual6FpnJJavaValue_nGHandle_nLKlassHandle_nMsymbolHandle_533pnGThread__v_;
text: .text%__1cYNoJvmtiVMObjectAllocMark2t6M_v_;
text: .text%__1cYNoJvmtiVMObjectAllocMark2T6M_v_;
@@ -1394,7 +1336,6 @@ text: .text%__1cRLowMemoryDetectorbLdetect_low_memory_for_collected_pools6F_v_:
text: .text%jni_GetStringUTFLength: jni.o;
text: .text%__1cQjava_lang_StringLutf8_length6FpnHoopDesc__i_;
text: .text%jni_GetStringLength: jni.o;
-text: .text%__1cQjava_lang_StringGlength6FpnHoopDesc__i_;
text: .text%jni_GetStringUTFRegion: jni.o;
text: .text%__1cQjava_lang_StringOas_utf8_string6FpnHoopDesc_ii_pc_;
text: .text%JVM_FindClassFromClassLoader;
@@ -1445,13 +1386,9 @@ text: .text%Unsafe_FreeMemory;
text: .text%__1cNSignatureInfoIdo_float6M_v_: bytecode.o;
text: .text%__1cFJNIidEfind6Mi_p0_;
text: .text%jni_NewObjectV: jni.o;
-text: .text%__1cMalloc_object6FpnH_jclass_pnGThread__pnPinstanceOopDesc__: jni.o;
text: .text%jni_GetStringRegion: jni.o;
-text: .text%__1cQjava_lang_StringGoffset6FpnHoopDesc__i_;
-text: .text%__1cQjava_lang_StringFvalue6FpnHoopDesc__pnQtypeArrayOopDesc__;
text: .text%jni_GetObjectField: jni.o;
text: .text%jni_GetStringCritical: jni.o;
-text: .text%__1cJGC_lockerNlock_critical6FpnKJavaThread__v_: jni.o;
text: .text%jni_ReleaseStringCritical: jni.o;
text: .text%JVM_LoadLibrary;
text: .text%JVM_FindLibraryEntry;
@@ -1496,10 +1433,7 @@ text: .text%__1cNobjArrayKlassIallocate6MipnGThread__pnPobjArrayOopDesc__;
text: .text%jni_SetObjectArrayElement: jni.o;
text: .text%jni_GetObjectArrayElement: jni.o;
text: .text%__1cQSimpleCompPolicyXmethod_invocation_event6MnMmethodHandle_pnGThread__v_;
-text: .text%__1cRCompilationPolicybIreset_counter_for_invocation_event6MnMmethodHandle__v_;
text: .text%__1cRInvocationCounterJset_carry6M_v_;
-text: .text%__1cNCompileBrokerOcompile_method6FnMmethodHandle_i1ipkcpnGThread__pnHnmethod__;
-text: .text%__1cQSimpleCompPolicyRcompilation_level6MnMmethodHandle_i_i_;
text: .text%__1cNobjArrayKlassKcopy_array6MpnMarrayOopDesc_i2iipnGThread__v_;
text: .text%__1cLJvmtiExportQenter_live_phase6F_v_;
text: .text%__1cLJvmtiExportTpost_vm_initialized6F_v_;
@@ -1512,7 +1446,6 @@ text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMs
text: .text%__1cCosOsignal_init_pd6F_v_;
text: .text%__1cQjava_lang_ThreadKset_daemon6FpnHoopDesc__v_;
text: .text%__1cICompiler2t6M_v_;
-text: .text%__1cNCompileBrokerVinit_compiler_threads6Fi_v_;
text: .text%__1cQCompilerCounters2t6MpkcipnGThread__v_;
text: .text%__1cNCompileBrokerUmake_compiler_thread6FpkcpnMCompileQdDueue_pnQCompilerCounters_pnGThread__pnOCompilerThread__;
text: .text%__1cOCompilerThread2t6MpnMCompileQdDueue_pnQCompilerCounters__v_;
@@ -1570,10 +1503,7 @@ text: .text%__1cTjava_lang_ThrowableTfill_in_stack_trace6FnGHandle_pnGThread__v_
text: .text%__1cTjava_lang_ThrowableNset_backtrace6FpnHoopDesc_2_v_;
text: .text%__1cTjava_lang_ThrowableQclear_stacktrace6FpnHoopDesc__v_;
text: .text%__1cVPreserveExceptionMark2T6M_v_;
-text: .text%__1cKExceptionsG_throw6FpnGThread_pkcinGHandle__v_;
text: .text%__1cSInterpreterRuntimeXthrow_pending_exception6FpnKJavaThread__v_;
-text: .text%__1cNSharedRuntimebKexception_handler_for_return_address6FpC_1_;
-text: .text%__1cNSharedRuntimebOraw_exception_handler_for_return_address6FpC_1_;
text: .text%__1cSInterpreterRuntimebFexception_handler_for_exception6FpnKJavaThread_pnHoopDesc__pC_;
text: .text%__1cNmethodOopDescbEfast_exception_handler_bci_for6MnLKlassHandle_ipnGThread__i_;
text: .text%__1cSInterpreterRuntimePset_bcp_and_mdp6FpCpnKJavaThread__v_;
@@ -1588,7 +1518,6 @@ text: .text%__1cNCompileBrokerZinvoke_compiler_on_method6FpnLCompileTask__v_;
text: .text%__1cNCompileBrokerVpush_jni_handle_block6F_v_;
text: .text%__1cPciObjectFactory2t6MpnFArena_i_v_;
text: .text%__1cPciObjectFactoryTinit_shared_objects6M_v_;
-text: .text%__1cIciSymbol2t6MnMsymbolHandle__v_;
text: .text%__1cIciObject2t6MnGHandle__v_;
text: .text%__1cPciObjectFactoryEfind6MpnHoopDesc_pnNGrowableArray4CpnIciObject____i_;
text: .text%__1cPciObjectFactoryNinit_ident_of6MpnIciObject__v_;
@@ -1675,14 +1604,12 @@ text: .text%__1cOGenerateOopMapYrewrite_refval_conflicts6M_v_;
text: .text%__1cOGenerateOopMapNreport_result6M_v_;
text: .text%__1cLCompilationJbuild_hir6M_v_;
text: .text%__1cCIR2t6MpnLCompilation_pnIciMethod_i_v_;
-text: .text%__1cJValueTypeKinitialize6F_v_;
text: .text%__1cMciNullObjectEmake6F_p0_;
text: .text%__1cMGraphBuilderKinitialize6F_v_;
text: .text%__1cJXHandlers2t6MpnIciMethod__v_;
text: .text%__1cIciMethodJload_code6M_v_;
text: .text%__1cHIRScopeLbuild_graph6MpnLCompilation_i_pnKBlockBegin__;
text: .text%__1cQBlockListBuilderLset_leaders6M_v_;
-text: .text%__1cKValueStack2t6MpnHIRScope_ii_v_;
text: .text%__1cMGraphBuilderPpush_root_scope6MpnHIRScope_pnJBlockList_pnKBlockBegin__v_;
text: .text%__1cMGraphBuilderJScopeDataJset_scope6MpnHIRScope__v_;
text: .text%__1cIValueMap2t6M_v_;
@@ -1690,7 +1617,6 @@ text: .text%__1cMGraphBuilderJScopeDataQadd_to_work_list6MpnKBlockBegin__v_;
text: .text%__1cNResourceArrayGexpand6MIiri_v_;
text: .text%__1cMGraphBuilderJScopeDataVremove_from_work_list6M_pnKBlockBegin__;
text: .text%__1cIValueMapIkill_all6M_v_;
-text: .text%__1cKValueStackEcopy6M_p0_;
text: .text%__1cGValuesIpush_all6Mpk0_v_: c1_ValueStack.o;
text: .text%__1cMGraphBuilderbBiterate_bytecodes_for_block6Mi_pnIBlockEnd__;
text: .text%__1cMGraphBuilderJScopeDataIblock_at6Mi_pnKBlockBegin__;
@@ -1709,8 +1635,6 @@ text: .text%__1cHciFieldPinitialize_from6MpnPfieldDescriptor__v_;
text: .text%__1cMas_ValueType6FnJBasicType__pnJValueType__;
text: .text%__1cMLinkResolverXresolve_klass_no_update6FrnLKlassHandle_nSconstantPoolHandle_ipnGThread__v_;
text: .text%__1cTconstantPoolOopDescbCklass_ref_at_if_loaded_check6FnSconstantPoolHandle_ipnGThread__pnMklassOopDesc__;
-text: .text%__1cMGraphBuilderKlock_stack6M_pnKValueStack__;
-text: .text%__1cKValueStackKcopy_locks6M_p0_;
text: .text%__1cJLoadFieldFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerMdo_LoadField6MpnJLoadField__v_;
text: .text%__1cJLoadFieldEhash6kM_i_: c1_Instruction.o;
@@ -1738,7 +1662,6 @@ text: .text%__1cMGraphBuilderNmethod_return6MpnLInstruction__v_;
text: .text%__1cGReturnFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerJdo_Return6MpnGReturn__v_;
text: .text%__1cGReturnJas_Return6M_p0_: c1_GraphBuilder.o;
-text: .text%__1cKValueStackMcaller_state6kM_p0_;
text: .text%__1cQSystemDictionarybOfind_constrained_instance_or_array_klass6FnMsymbolHandle_nGHandle_pnGThread__pnMklassOopDesc__;
text: .text%__1cMGraphBuilderHif_same6MpnJValueType_nLInstructionJCondition__v_;
text: .text%__1cNCanonicalizerNdo_StoreField6MpnKStoreField__v_;
@@ -1781,24 +1704,17 @@ text: .text%__1cTNullCheckEliminatorLiterate_one6MpnKBlockBegin__v_;
text: .text%__1cGBitMapIset_from6M0_v_;
text: .text%__1cQNullCheckVisitorNdo_BlockBegin6MpnKBlockBegin__v_;
text: .text%__1cQNullCheckVisitorHdo_Base6MpnEBase__v_;
-text: .text%__1cKStateSplitPinput_values_do6MpFppnLInstruction__v_v_: c1_Canonicalizer.o;
text: .text%__1cQNullCheckVisitorHdo_Goto6MpnEGoto__v_;
-text: .text%__1cTNullCheckEliminatorIdo_value6FppnLInstruction__v_;
-text: .text%__1cFLocalPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cFLocalFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorIdo_Local6MpnFLocal__v_;
-text: .text%__1cLAccessFieldPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorMdo_LoadField6MpnJLoadField__v_;
text: .text%__1cTNullCheckEliminatorShandle_AccessField6MpnLAccessField__v_;
-text: .text%__1cCIfPinput_values_do6MpFppnLInstruction__v_v_: c1_Canonicalizer.o;
-text: .text%__1cIConstantPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorLdo_Constant6MpnIConstant__v_;
text: .text%__1cQNullCheckVisitorFdo_If6MpnCIf__v_;
text: .text%__1cQNullCheckVisitorPdo_ArithmeticOp6MpnMArithmeticOp__v_;
text: .text%__1cQNullCheckVisitorOdo_LoadIndexed6MpnLLoadIndexed__v_;
text: .text%__1cTNullCheckEliminatorShandle_LoadIndexed6MpnLLoadIndexed__v_;
text: .text%__1cQNullCheckVisitorNdo_StoreField6MpnKStoreField__v_;
-text: .text%__1cGReturnPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorJdo_Return6MpnGReturn__v_;
text: .text%__1cGBitMapQset_intersection6M0_v_;
text: .text%__1cLInstructionJas_Invoke6M_pnGInvoke__: c1_Canonicalizer.o;
@@ -1811,11 +1727,7 @@ text: .text%__1cKStateSplitFscope6kM_pnHIRScope__;
text: .text%__1cIBlockEndOsubstitute_sux6MpnKBlockBegin_2_v_;
text: .text%__1cCIRMcompute_code6M_v_;
text: .text%__1cJBlockListJblocks_do6MpFpnKBlockBegin__v_v_;
-text: .text%__1cQUseCountComputerXbasic_compute_use_count6FpnKBlockBegin__v_: c1_IR.o;
-text: .text%__1cQUseCountComputerQupdate_use_count6FppnLInstruction__v_: c1_IR.o;
text: .text%__1cFLocalIas_Local6M_p0_: c1_GraphBuilder.o;
-text: .text%__1cKStateSplitPstate_values_do6MpFppnLInstruction__v_v_;
-text: .text%__1cKValueStackJvalues_do6MpFppnLInstruction__v_v_;
text: .text%__1cLCompilationIemit_lir6M_v_;
text: .text%__1cLInstructionGas_Phi6M_pnDPhi__: c1_Canonicalizer.o;
text: .text%__1cMas_BasicType6FpnJValueType__nJBasicType__;
@@ -1888,14 +1800,11 @@ text: .text%__1cLOopRecorderIoop_size6M_i_;
text: .text%__1cYDebugInformationRecorderIpcs_size6M_i_;
text: .text%__1cYDebugInformationRecorderJdata_size6M_i_;
text: .text%__1cHnmethod2n6FIi_pv_;
-text: .text%__1cHnmFlagsFclear6M_v_;
text: .text%__1cYDebugInformationRecorderHcopy_to6MpnHnmethod__v_;
-text: .text%__1cLOopRecorderHcopy_to6MpnICodeBlob__v_;
text: .text%__1cIUniverseMnon_oop_word6F_pv_;
text: .text%__1cHnmethodQcopy_scopes_data6MpCi_v_;
text: .text%__1cGPcDesc2t6Miii_v_;
text: .text%__1cJCodeCacheGcommit6FpnICodeBlob__v_;
-text: .text%__1cFVTuneOcreate_nmethod6FpnHnmethod__v_;
text: .text%__1cWImplicitExceptionTableHcopy_to6MpnHnmethod__v_;
text: .text%__1cKNativeJumpbEcheck_verified_entry_alignment6FpC1_v_;
text: .text%__1cFciEnvKcompile_id6M_I_;
@@ -1938,8 +1847,6 @@ text: .text%__1cNCanonicalizerOdo_NewInstance6MpnLNewInstance__v_;
text: .text%__1cKValueStackMclear_locals6M_v_;
text: .text%__1cMGraphBuilderIstack_op6MnJBytecodesECode__v_;
text: .text%__1cMGraphBuilderGinvoke6MnJBytecodesECode__v_;
-text: .text%__1cFciEnvTget_method_by_index6MpnPciInstanceKlass_inJBytecodesECode__pnIciMethod__;
-text: .text%__1cFciEnvYget_method_by_index_impl6MpnPciInstanceKlass_inJBytecodesECode__pnIciMethod__;
text: .text%__1cFciEnvbTget_instance_klass_for_declared_method_holder6FpnHciKlass__pnPciInstanceKlass__;
text: .text%__1cPciObjectFactoryTget_unloaded_method6MpnPciInstanceKlass_pnIciSymbol_4_pnIciMethod__;
text: .text%__1cIciMethod2t6MpnPciInstanceKlass_pnIciSymbol_4_v_;
@@ -1951,18 +1858,13 @@ text: .text%__1cKValueStackNpop_arguments6Mi_pnGValues__;
text: .text%__1cGInvokeFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerJdo_Invoke6MpnGInvoke__v_;
text: .text%__1cGInvokeJas_Invoke6M_p0_: c1_Instruction.o;
-text: .text%__1cFThrowFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerIdo_Throw6MpnFThrow__v_;
-text: .text%__1cFThrowIas_Throw6M_p0_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorOdo_NewInstance6MpnLNewInstance__v_;
text: .text%__1cTNullCheckEliminatorShandle_NewInstance6MpnLNewInstance__v_;
-text: .text%__1cGInvokePinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorJdo_Invoke6MpnGInvoke__v_;
text: .text%__1cTNullCheckEliminatorNhandle_Invoke6MpnGInvoke__v_;
-text: .text%__1cFThrowPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorIdo_Throw6MpnFThrow__v_;
text: .text%__1cLInstructionGnegate6Fn0AJCondition__1_;
-text: .text%__1cFThrowPstate_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cIVoidTypeLas_VoidType6M_p0_: c1_ValueType.o;
text: .text%__1cLNewInstanceKexact_type6kM_pnGciType__;
text: .text%__1cLNewInstanceOas_NewInstance6M_p0_: c1_Instruction.o;
@@ -1975,7 +1877,6 @@ text: .text%__1cPNewInstanceStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_x86.o;
text: .text%__1cNLIR_AssemblerJemit_call6MpnOLIR_OpJavaCall__v_;
text: .text%__1cNLIR_AssemblerKalign_call6MnILIR_Code__v_;
text: .text%__1cICodeStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_x86.o;
-text: .text%__1cNLIR_AssemblerEcall6MpCnJrelocInfoJrelocType_pnMCodeEmitInfo__v_;
text: .text%__1cbBopt_virtual_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cYinternal_word_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cJrelocInfoKset_format6Mi_v_;
@@ -2021,9 +1922,6 @@ text: .text%__1cMGraphBuilderUclear_inline_bailout6M_v_;
text: .text%__1cMGraphBuilderWrecursive_inline_level6kMpnIciMethod__i_;
text: .text%__1cPciObjectFactoryMvm_symbol_at6Fi_pnIciSymbol__;
text: .text%__1cNCanonicalizerMdo_NullCheck6MpnJNullCheck__v_;
-text: .text%__1cKValueStackKpush_scope6MpnHIRScope__p0_;
-text: .text%__1cHIRScopeXcompute_lock_stack_size6M_v_;
-text: .text%__1cMGraphBuilderJScopeDataRcaller_stack_size6kM_i_;
text: .text%__1cMGraphBuilderJScopeDataLnum_returns6M_i_;
text: .text%__1cMGraphBuilderJScopeDataXset_inline_cleanup_info6MpnKBlockBegin_pnLInstruction_pnKValueStack__v_;
text: .text%__1cMGraphBuilderJScopeDataQincr_num_returns6M_v_;
@ -2033,12 +1931,8 @@ text: .text%__1cFciEnvVnotice_inlined_method6MpnIciMethod__v_;
text: .text%__1cMLinkResolverbCresolve_special_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__; text: .text%__1cMLinkResolverbCresolve_special_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__;
text: .text%__1cMGraphBuilderOinline_bailout6Mpkc_v_; text: .text%__1cMGraphBuilderOinline_bailout6Mpkc_v_;
text: .text%__1cLInstructionEprev6MpnKBlockBegin__p0_; text: .text%__1cLInstructionEprev6MpnKBlockBegin__p0_;
text: .text%__1cKBlockBeginPblock_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cIConstantPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cIBlockEndPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cQNullCheckVisitorMdo_NullCheck6MpnJNullCheck__v_; text: .text%__1cQNullCheckVisitorMdo_NullCheck6MpnJNullCheck__v_;
text: .text%__1cTNullCheckEliminatorQhandle_NullCheck6MpnJNullCheck__v_; text: .text%__1cTNullCheckEliminatorQhandle_NullCheck6MpnJNullCheck__v_;
text: .text%__1cHIRScopeNtop_scope_bci6kM_i_;
text: .text%__1cNSharedRuntimeQfind_callee_info6FpnKJavaThread_rnJBytecodesECode_rnICallInfo_pnGThread__nGHandle__; text: .text%__1cNSharedRuntimeQfind_callee_info6FpnKJavaThread_rnJBytecodesECode_rnICallInfo_pnGThread__nGHandle__;
text: .text%__1cGPcDescHreal_pc6kMpknHnmethod__pC_; text: .text%__1cGPcDescHreal_pc6kMpknHnmethod__pC_;
text: .text%__1cLPcDescCacheLadd_pc_desc6MpnGPcDesc__v_; text: .text%__1cLPcDescCacheLadd_pc_desc6MpnGPcDesc__v_;
@ -2055,14 +1949,11 @@ text: .text%__1cKNativeCallXset_destination_mt_safe6MpC_v_;
text: .text%__1cNmethodOopDescTverified_code_entry6M_pC_; text: .text%__1cNmethodOopDescTverified_code_entry6M_pC_;
text: .text%jni_GetByteArrayRegion: jni.o; text: .text%jni_GetByteArrayRegion: jni.o;
text: .text%JVM_DefineClassWithSource; text: .text%JVM_DefineClassWithSource;
text: .text%__1cXjvm_define_class_common6FpnHJNIEnv__pkcpnI_jobject_pkWi53pnGThread__pnH_jclass__: jvm.o;
text: .text%__1cQSystemDictionaryTresolve_from_stream6FnMsymbolHandle_nGHandle_2pnPClassFileStream_pnGThread__pnMklassOopDesc__;
text: .text%__1cPClassFileParserbDverify_legal_method_signature6MnMsymbolHandle_1pnGThread__i_; text: .text%__1cPClassFileParserbDverify_legal_method_signature6MnMsymbolHandle_1pnGThread__i_;
text: .text%__1cPClassFileParserXverify_legal_class_name6MnMsymbolHandle_pnGThread__v_; text: .text%__1cPClassFileParserXverify_legal_class_name6MnMsymbolHandle_pnGThread__v_;
text: .text%__1cQSystemDictionarybAvalidate_protection_domain6FnTinstanceKlassHandle_nGHandle_2pnGThread__v_; text: .text%__1cQSystemDictionarybAvalidate_protection_domain6FnTinstanceKlassHandle_nGHandle_2pnGThread__v_;
text: .text%__1cKDictionaryVadd_protection_domain6MiInTinstanceKlassHandle_nGHandle_2pnGThread__v_; text: .text%__1cKDictionaryVadd_protection_domain6MiInTinstanceKlassHandle_nGHandle_2pnGThread__v_;
text: .text%__1cPDictionaryEntryVadd_protection_domain6MpnHoopDesc__v_; text: .text%__1cPDictionaryEntryVadd_protection_domain6MpnHoopDesc__v_;
text: .text%__1cUverify_byte_codes_fn6F_pv_: verifier.o;
text: .text%JVM_GetClassCPEntriesCount; text: .text%JVM_GetClassCPEntriesCount;
text: .text%JVM_GetClassCPTypes; text: .text%JVM_GetClassCPTypes;
text: .text%JVM_GetClassNameUTF; text: .text%JVM_GetClassNameUTF;
@ -2124,7 +2015,6 @@ text: .text%__1cIOSThread2T6M_v_;
text: .text%__1cIOSThreadKpd_destroy6M_v_; text: .text%__1cIOSThreadKpd_destroy6M_v_;
text: .text%jni_DestroyJavaVM; text: .text%jni_DestroyJavaVM;
text: .text%jni_AttachCurrentThread; text: .text%jni_AttachCurrentThread;
text: .text%attach_current_thread: jni.o;
text: .text%__1cKJavaThreadVinvoke_shutdown_hooks6M_v_; text: .text%__1cKJavaThreadVinvoke_shutdown_hooks6M_v_;
text: .text%__1cLbefore_exit6FpnKJavaThread__v_; text: .text%__1cLbefore_exit6FpnKJavaThread__v_;
text: .text%__1cNWatcherThreadEstop6F_v_; text: .text%__1cNWatcherThreadEstop6F_v_;
@ -2139,7 +2029,6 @@ text: .text%__1cCosXterminate_signal_thread6F_v_;
text: .text%__1cCosNsigexitnum_pd6F_i_; text: .text%__1cCosNsigexitnum_pd6F_i_;
text: .text%__1cCosNsignal_notify6Fi_v_; text: .text%__1cCosNsignal_notify6Fi_v_;
text: .text%__1cQprint_statistics6F_v_; text: .text%__1cQprint_statistics6F_v_;
text: .text%__1cFVTuneEexit6F_v_;
text: .text%__1cIVMThreadXwait_for_vm_thread_exit6F_v_; text: .text%__1cIVMThreadXwait_for_vm_thread_exit6F_v_;
text: .text%__1cUSafepointSynchronizeFbegin6F_v_; text: .text%__1cUSafepointSynchronizeFbegin6F_v_;
text: .text%__1cORuntimeServiceWrecord_safepoint_begin6F_v_; text: .text%__1cORuntimeServiceWrecord_safepoint_begin6F_v_;
@ -2158,7 +2047,6 @@ text: .text%__1cQSystemDictionaryRnumber_of_classes6F_i_;
text: .text%__1cQSystemDictionaryStry_get_next_class6F_pnMklassOopDesc__; text: .text%__1cQSystemDictionaryStry_get_next_class6F_pnMklassOopDesc__;
text: .text%__1cKDictionaryStry_get_next_class6M_pnMklassOopDesc__; text: .text%__1cKDictionaryStry_get_next_class6M_pnMklassOopDesc__;
text: .text%__1cNinstanceKlassKmethods_do6MpFpnNmethodOopDesc__v_v_; text: .text%__1cNinstanceKlassKmethods_do6MpFpnNmethodOopDesc__v_v_;
text: .text%__1cONMethodSweeperFsweep6F_v_;
text: .text%__1cNCompileBrokerQset_should_block6F_v_; text: .text%__1cNCompileBrokerQset_should_block6F_v_;
text: .text%__1cHVM_ExitbJwait_for_threads_in_native_to_block6F_i_; text: .text%__1cHVM_ExitbJwait_for_threads_in_native_to_block6F_i_;
text: .text%__1cIVMThreadHdestroy6F_v_; text: .text%__1cIVMThreadHdestroy6F_v_;
@ -2171,8 +2059,6 @@ text: .text%__1cPPerfDataManagerHdestroy6F_v_;
text: .text%__1cIPerfData2T6M_v_; text: .text%__1cIPerfData2T6M_v_;
text: .text%__1cKPerfMemoryHdestroy6F_v_; text: .text%__1cKPerfMemoryHdestroy6F_v_;
text: .text%__1cKPerfMemoryUdelete_memory_region6F_v_; text: .text%__1cKPerfMemoryUdelete_memory_region6F_v_;
text: .text%__1cUdelete_shared_memory6FpcI_v_: perfMemory_solaris.o;
text: .text%__1cLremove_file6Fpkc_v_: perfMemory_solaris.o;
text: .text%__1cMostream_exit6F_v_; text: .text%__1cMostream_exit6F_v_;
text: .text%__SLIP.DELETER__C: ostream.o; text: .text%__SLIP.DELETER__C: ostream.o;
text: .text%JVM_Halt; text: .text%JVM_Halt;
@ -2206,14 +2092,10 @@ text: .text%jni_IsAssignableFrom: jni.o;
text: .text%__1cOGenerateOopMapGdo_ldc6Mii_v_; text: .text%__1cOGenerateOopMapGdo_ldc6Mii_v_;
text: .text%__1cQComputeCallStackIdo_array6Mii_v_: generateOopMap.o; text: .text%__1cQComputeCallStackIdo_array6Mii_v_: generateOopMap.o;
text: .text%__1cMGraphBuilderNload_constant6M_v_; text: .text%__1cMGraphBuilderNload_constant6M_v_;
text: .text%__1cQciBytecodeStreamSget_constant_index6kM_i_;
text: .text%__1cFciEnvVget_constant_by_index6MpnPciInstanceKlass_i_nKciConstant__;
text: .text%__1cFciEnvbAget_constant_by_index_impl6MpnPciInstanceKlass_i_nKciConstant__;
text: .text%__1cMLinkResolverbBresolve_static_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__; text: .text%__1cMLinkResolverbBresolve_static_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__;
text: .text%__1cJValueTypeLas_VoidType6M_pnIVoidType__: c1_Canonicalizer.o; text: .text%__1cJValueTypeLas_VoidType6M_pnIVoidType__: c1_Canonicalizer.o;
text: .text%__1cWstatic_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o; text: .text%__1cWstatic_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cRComputeEntryStackIdo_array6Mii_v_: generateOopMap.o; text: .text%__1cRComputeEntryStackIdo_array6Mii_v_: generateOopMap.o;
text: .text%__1cEIfOpPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cEIfOpFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cEIfOpFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorHdo_IfOp6MpnEIfOp__v_; text: .text%__1cQNullCheckVisitorHdo_IfOp6MpnEIfOp__v_;
text: .text%__1cOGenerateOopMapMdo_checkcast6M_v_; text: .text%__1cOGenerateOopMapMdo_checkcast6M_v_;
@ -2223,12 +2105,10 @@ text: .text%__1cNCanonicalizerNdo_InstanceOf6MpnKInstanceOf__v_;
text: .text%__1cKInstanceOfNas_InstanceOf6M_p0_: c1_GraphBuilder.o; text: .text%__1cKInstanceOfNas_InstanceOf6M_p0_: c1_GraphBuilder.o;
text: .text%__1cMGraphBuilderKcheck_cast6Mi_v_; text: .text%__1cMGraphBuilderKcheck_cast6Mi_v_;
text: .text%__1cNCanonicalizerMdo_CheckCast6MpnJCheckCast__v_; text: .text%__1cNCanonicalizerMdo_CheckCast6MpnJCheckCast__v_;
text: .text%__1cJTypeCheckPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorNdo_InstanceOf6MpnKInstanceOf__v_; text: .text%__1cQNullCheckVisitorNdo_InstanceOf6MpnKInstanceOf__v_;
text: .text%__1cQNullCheckVisitorMdo_CheckCast6MpnJCheckCast__v_; text: .text%__1cQNullCheckVisitorMdo_CheckCast6MpnJCheckCast__v_;
text: .text%__1cTSimpleExceptionStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_x86.o; text: .text%__1cTSimpleExceptionStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_x86.o;
text: .text%__1cNLIR_AssemblerQemit_opTypeCheck6MpnPLIR_OpTypeCheck__v_; text: .text%__1cNLIR_AssemblerQemit_opTypeCheck6MpnPLIR_OpTypeCheck__v_;
text: .text%__1cIciObjectIencoding6M_pnI_jobject__;
text: .text%__1cTSimpleExceptionStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_x86.o; text: .text%__1cTSimpleExceptionStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_x86.o;
text: .text%__1cTSimpleExceptionStubJemit_code6MpnNLIR_Assembler__v_; text: .text%__1cTSimpleExceptionStubJemit_code6MpnNLIR_Assembler__v_;
text: .text%__1cJLoadFieldMas_LoadField6M_p0_: c1_Instruction.o; text: .text%__1cJLoadFieldMas_LoadField6M_p0_: c1_Instruction.o;
@ -2266,8 +2146,6 @@ text: .text%__1cHConvertEname6kM_pkc_: c1_GraphBuilder.o;
text: .text%__1cMGraphBuilderNstore_indexed6MnJBasicType__v_; text: .text%__1cMGraphBuilderNstore_indexed6MnJBasicType__v_;
text: .text%__1cIValueMapKkill_array6MpnJValueType__v_; text: .text%__1cIValueMapKkill_array6MpnJValueType__v_;
text: .text%__1cNCanonicalizerPdo_StoreIndexed6MpnMStoreIndexed__v_; text: .text%__1cNCanonicalizerPdo_StoreIndexed6MpnMStoreIndexed__v_;
text: .text%__1cLAccessFieldPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cHConvertPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorKdo_LogicOp6MpnHLogicOp__v_; text: .text%__1cQNullCheckVisitorKdo_LogicOp6MpnHLogicOp__v_;
text: .text%__1cQNullCheckVisitorKdo_Convert6MpnHConvert__v_; text: .text%__1cQNullCheckVisitorKdo_Convert6MpnHConvert__v_;
text: .text%__1cQNullCheckVisitorPdo_StoreIndexed6MpnMStoreIndexed__v_; text: .text%__1cQNullCheckVisitorPdo_StoreIndexed6MpnMStoreIndexed__v_;
@ -2294,10 +2172,8 @@ text: .text%__1cMGraphBuilderOnew_type_array6M_v_;
text: .text%__1cMNewTypeArrayFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cMNewTypeArrayFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerPdo_NewTypeArray6MpnMNewTypeArray__v_; text: .text%__1cNCanonicalizerPdo_NewTypeArray6MpnMNewTypeArray__v_;
text: .text%__1cNCanonicalizerMdo_Intrinsic6MpnJIntrinsic__v_; text: .text%__1cNCanonicalizerMdo_Intrinsic6MpnJIntrinsic__v_;
text: .text%__1cLAccessArrayPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorOdo_ArrayLength6MpnLArrayLength__v_; text: .text%__1cQNullCheckVisitorOdo_ArrayLength6MpnLArrayLength__v_;
text: .text%__1cTNullCheckEliminatorShandle_ArrayLength6MpnLArrayLength__v_; text: .text%__1cTNullCheckEliminatorShandle_ArrayLength6MpnLArrayLength__v_;
text: .text%__1cINewArrayPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorPdo_NewTypeArray6MpnMNewTypeArray__v_; text: .text%__1cQNullCheckVisitorPdo_NewTypeArray6MpnMNewTypeArray__v_;
text: .text%__1cTNullCheckEliminatorPhandle_NewArray6MpnINewArray__v_; text: .text%__1cTNullCheckEliminatorPhandle_NewArray6MpnINewArray__v_;
text: .text%__1cQNullCheckVisitorMdo_Intrinsic6MpnJIntrinsic__v_; text: .text%__1cQNullCheckVisitorMdo_Intrinsic6MpnJIntrinsic__v_;
@ -2309,7 +2185,6 @@ text: .text%__1cMNewTypeArrayKexact_type6kM_pnGciType__;
text: .text%__1cLArrayLengthOas_ArrayLength6M_p0_: c1_GraphBuilder.o; text: .text%__1cLArrayLengthOas_ArrayLength6M_p0_: c1_GraphBuilder.o;
text: .text%__1cILIR_ListUunsigned_shift_right6MpnLLIR_OprDesc_222_v_; text: .text%__1cILIR_ListUunsigned_shift_right6MpnLLIR_OprDesc_222_v_;
text: .text%__1cQNewTypeArrayStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_x86.o; text: .text%__1cQNewTypeArrayStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_x86.o;
text: .text%__1cNLIR_AssemblerHic_call6MpCpnMCodeEmitInfo__v_;
text: .text%__1cXvirtual_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o; text: .text%__1cXvirtual_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cNLIR_AssemblerQemit_alloc_array6MpnQLIR_OpAllocArray__v_; text: .text%__1cNLIR_AssemblerQemit_alloc_array6MpnQLIR_OpAllocArray__v_;
text: .text%__1cNLIR_AssemblerSarray_element_size6kMnJBasicType__nHAddressLScaleFactor__; text: .text%__1cNLIR_AssemblerSarray_element_size6kMnJBasicType__nHAddressLScaleFactor__;
@ -2335,8 +2210,6 @@ text: .text%__1cRInlineCacheBufferLnew_ic_stub6F_pnGICStub__;
text: .text%JVM_NewArray; text: .text%JVM_NewArray;
text: .text%__1cKReflectionRreflect_new_array6FpnHoopDesc_ipnGThread__pnMarrayOopDesc__; text: .text%__1cKReflectionRreflect_new_array6FpnHoopDesc_ipnGThread__pnMarrayOopDesc__;
text: .text%__1cSInterpreterRuntimeOmultianewarray6FpnKJavaThread_pi_v_; text: .text%__1cSInterpreterRuntimeOmultianewarray6FpnKJavaThread_pi_v_;
text: .text%__1cNinstanceKlassSlookup_osr_nmethod6kMkpnNmethodOopDesc_i_pnHnmethod__;
text: .text%__1cQSimpleCompPolicyYmethod_back_branch_event6MnMmethodHandle_iipnGThread__v_;
text: .text%__1cMGraphBuilderQnew_object_array6M_v_; text: .text%__1cMGraphBuilderQnew_object_array6M_v_;
text: .text%__1cONewObjectArrayFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cONewObjectArrayFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerRdo_NewObjectArray6MpnONewObjectArray__v_; text: .text%__1cNCanonicalizerRdo_NewObjectArray6MpnONewObjectArray__v_;
@ -2349,7 +2222,6 @@ text: .text%__1cHShiftOpEhash6kM_i_: c1_GraphBuilder.o;
text: .text%__1cHShiftOpEname6kM_pkc_: c1_GraphBuilder.o; text: .text%__1cHShiftOpEname6kM_pkc_: c1_GraphBuilder.o;
text: .text%__1cLLoadIndexedOas_LoadIndexed6M_p0_: c1_Instruction.o; text: .text%__1cLLoadIndexedOas_LoadIndexed6M_p0_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorRdo_NewObjectArray6MpnONewObjectArray__v_; text: .text%__1cQNullCheckVisitorRdo_NewObjectArray6MpnONewObjectArray__v_;
text: .text%__1cDOp2Pinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorKdo_ShiftOp6MpnHShiftOp__v_; text: .text%__1cQNullCheckVisitorKdo_ShiftOp6MpnHShiftOp__v_;
text: .text%__1cHciKlassMaccess_flags6M_i_; text: .text%__1cHciKlassMaccess_flags6M_i_;
text: .text%__1cPciObjArrayKlassEmake6FpnHciKlass__p0_; text: .text%__1cPciObjArrayKlassEmake6FpnHciKlass__p0_;
@ -2413,7 +2285,6 @@ text: .text%__1cQDefNewGenerationTallocate_from_space6MI_pnIHeapWord__;
text: .text%__1cPVM_GC_OperationZacquire_pending_list_lock6M_v_; text: .text%__1cPVM_GC_OperationZacquire_pending_list_lock6M_v_;
text: .text%__1cQinstanceRefKlassZacquire_pending_list_lock6FpnJBasicLock__v_; text: .text%__1cQinstanceRefKlassZacquire_pending_list_lock6FpnJBasicLock__v_;
text: .text%__1cbAVM_GenCollectForAllocationEdoit6M_v_; text: .text%__1cbAVM_GenCollectForAllocationEdoit6M_v_;
text: .text%__1cPGCMemoryManagerIgc_begin6M_v_;
text: .text%__1cKManagementJtimestamp6F_x_; text: .text%__1cKManagementJtimestamp6F_x_;
text: .text%__1cTContiguousSpacePoolQget_memory_usage6M_nLMemoryUsage__; text: .text%__1cTContiguousSpacePoolQget_memory_usage6M_nLMemoryUsage__;
text: .text%__1cTContiguousSpacePoolNused_in_bytes6M_I_: memoryPool.o; text: .text%__1cTContiguousSpacePoolNused_in_bytes6M_I_: memoryPool.o;
@ -2441,11 +2312,7 @@ text: .text%__1cKSharedHeapbAchange_strong_roots_parity6M_v_;
text: .text%__1cPFastScanClosureGdo_oop6MppnHoopDesc__v_: defNewGeneration.o; text: .text%__1cPFastScanClosureGdo_oop6MppnHoopDesc__v_: defNewGeneration.o;
text: .text%__1cKJNIHandlesHoops_do6FpnKOopClosure__v_; text: .text%__1cKJNIHandlesHoops_do6FpnKOopClosure__v_;
text: .text%__1cOJNIHandleBlockHoops_do6MpnKOopClosure__v_; text: .text%__1cOJNIHandleBlockHoops_do6MpnKOopClosure__v_;
text: .text%__1cHThreadsHoops_do6FpnKOopClosure__v_;
text: .text%__1cKJavaThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cGThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cKHandleAreaHoops_do6MpnKOopClosure__v_; text: .text%__1cKHandleAreaHoops_do6MpnKOopClosure__v_;
text: .text%__1cNchunk_oops_do6FpnKOopClosure_pnFChunk_pc_I_: handles.o;
text: .text%__1cFframeVinterpreter_frame_bci6kM_i_; text: .text%__1cFframeVinterpreter_frame_bci6kM_i_;
text: .text%__1cFframebDinterpreter_frame_monitor_end6kM_pnPBasicObjectLock__; text: .text%__1cFframebDinterpreter_frame_monitor_end6kM_pnPBasicObjectLock__;
text: .text%__1cFframebFinterpreter_frame_monitor_begin6kM_pnPBasicObjectLock__; text: .text%__1cFframebFinterpreter_frame_monitor_begin6kM_pnPBasicObjectLock__;
@ -2483,7 +2350,6 @@ text: .text%__1cQComputeCallStackHdo_long6M_v_: generateOopMap.o;
text: .text%__1cOGenerateOopMapOdo_monitorexit6Mi_v_; text: .text%__1cOGenerateOopMapOdo_monitorexit6Mi_v_;
text: .text%__1cOGenerateOopMapLmonitor_pop6M_nNCellTypeState__; text: .text%__1cOGenerateOopMapLmonitor_pop6M_nNCellTypeState__;
text: .text%__1cRComputeEntryStackHdo_long6M_v_: generateOopMap.o; text: .text%__1cRComputeEntryStackHdo_long6M_v_: generateOopMap.o;
text: .text%__1cIVMThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cQVMOperationQdDueueHoops_do6MpnKOopClosure__v_; text: .text%__1cQVMOperationQdDueueHoops_do6MpnKOopClosure__v_;
text: .text%__1cQVMOperationQdDueueNqueue_oops_do6MipnKOopClosure__v_; text: .text%__1cQVMOperationQdDueueNqueue_oops_do6MipnKOopClosure__v_;
text: .text%__1cSObjectSynchronizerHoops_do6FpnKOopClosure__v_; text: .text%__1cSObjectSynchronizerHoops_do6FpnKOopClosure__v_;
@ -2584,7 +2450,6 @@ text: .text%__1cUGenGCEpilogueClosureNdo_generation6MpnKGeneration__v_: genColle
text: .text%__1cRTenuredGenerationPupdate_counters6M_v_; text: .text%__1cRTenuredGenerationPupdate_counters6M_v_;
text: .text%__1cUCompactingPermGenGenPupdate_counters6M_v_; text: .text%__1cUCompactingPermGenGenPupdate_counters6M_v_;
text: .text%__1cXTraceMemoryManagerStats2T6M_v_; text: .text%__1cXTraceMemoryManagerStats2T6M_v_;
text: .text%__1cPGCMemoryManagerGgc_end6M_v_;
text: .text%__1cRLowMemoryDetectorWdetect_after_gc_memory6FpnKMemoryPool__v_; text: .text%__1cRLowMemoryDetectorWdetect_after_gc_memory6FpnKMemoryPool__v_;
text: .text%__1cNJvmtiGCMarker2T6M_v_; text: .text%__1cNJvmtiGCMarker2T6M_v_;
text: .text%__1cPVM_GC_OperationNdoit_epilogue6M_v_; text: .text%__1cPVM_GC_OperationNdoit_epilogue6M_v_;
@ -2597,7 +2462,6 @@ text: .text%jni_PopLocalFrame: jni.o;
text: .text%__1cMGraphBuilderJnegate_op6MpnJValueType__v_; text: .text%__1cMGraphBuilderJnegate_op6MpnJValueType__v_;
text: .text%__1cINegateOpFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cINegateOpFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerLdo_NegateOp6MpnINegateOp__v_; text: .text%__1cNCanonicalizerLdo_NegateOp6MpnINegateOp__v_;
text: .text%__1cINegateOpPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorLdo_NegateOp6MpnINegateOp__v_; text: .text%__1cQNullCheckVisitorLdo_NegateOp6MpnINegateOp__v_;
text: .text%__1cILIR_ListLstore_check6MpnLLIR_OprDesc_2222pnMCodeEmitInfo__v_; text: .text%__1cILIR_ListLstore_check6MpnLLIR_OprDesc_2222pnMCodeEmitInfo__v_;
text: .text%__1cXArrayStoreExceptionStub2t6MpnMCodeEmitInfo__v_; text: .text%__1cXArrayStoreExceptionStub2t6MpnMCodeEmitInfo__v_;
@ -2637,7 +2501,6 @@ text: .text%__1cYjava_lang_reflect_MethodLreturn_type6FpnHoopDesc__2_;
text: .text%JVM_IsInterrupted; text: .text%JVM_IsInterrupted;
text: .text%__1cTresource_free_bytes6FpcI_v_; text: .text%__1cTresource_free_bytes6FpcI_v_;
text: .text%__1cRComputeEntryStackHdo_bool6M_v_: generateOopMap.o; text: .text%__1cRComputeEntryStackHdo_bool6M_v_: generateOopMap.o;
text: .text%__1cMArithmeticOpKlock_stack6kM_pnKValueStack__: c1_Instruction.o;
text: .text%__1cJAssemblerGfist_s6MnHAddress__v_; text: .text%__1cJAssemblerGfist_s6MnHAddress__v_;
text: .text%__1cNLIR_AssemblerJreset_FPU6M_v_; text: .text%__1cNLIR_AssemblerJreset_FPU6M_v_;
text: .text%__1cNLIR_AssemblerIemit_op36MpnHLIR_Op3__v_; text: .text%__1cNLIR_AssemblerIemit_op36MpnHLIR_Op3__v_;
@ -2659,7 +2522,6 @@ text: .text%__1cUBytecode_tableswitchOdest_offset_at6kMi_i_;
text: .text%__1cMGraphBuilderMtable_switch6M_v_; text: .text%__1cMGraphBuilderMtable_switch6M_v_;
text: .text%__1cLTableSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cLTableSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerOdo_TableSwitch6MpnLTableSwitch__v_; text: .text%__1cNCanonicalizerOdo_TableSwitch6MpnLTableSwitch__v_;
text: .text%__1cGSwitchPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorOdo_TableSwitch6MpnLTableSwitch__v_; text: .text%__1cQNullCheckVisitorOdo_TableSwitch6MpnLTableSwitch__v_;
text: .text%__1cWstatic_call_RelocationLstatic_stub6M_pC_; text: .text%__1cWstatic_call_RelocationLstatic_stub6M_pC_;
text: .text%__1cSCompiledStaticCallMset_to_clean6M_v_; text: .text%__1cSCompiledStaticCallMset_to_clean6M_v_;
@ -2672,7 +2534,6 @@ text: .text%__1cNFingerprinterHdo_byte6M_v_: dump.o;
text: .text%Unsafe_SetMemory; text: .text%Unsafe_SetMemory;
text: .text%__1cNSharedRuntimeElrem6Fxx_x_; text: .text%__1cNSharedRuntimeElrem6Fxx_x_;
text: .text%Unsafe_DefineClass1; text: .text%Unsafe_DefineClass1;
text: .text%__1cSUnsafe_DefineClass6FpnHJNIEnv__pnI_jstring_pnL_jbyteArray_iipnI_jobject_7_pnH_jclass__: unsafe.o;
text: .text%JVM_DefineClass; text: .text%JVM_DefineClass;
text: .text%__1cVLoaderConstraintTableYextend_loader_constraint6MpnVLoaderConstraintEntry_nGHandle_pnMklassOopDesc__v_; text: .text%__1cVLoaderConstraintTableYextend_loader_constraint6MpnVLoaderConstraintEntry_nGHandle_pnMklassOopDesc__v_;
text: .text%__1cVLoaderConstraintTablebHensure_loader_constraint_capacity6MpnVLoaderConstraintEntry_i_v_; text: .text%__1cVLoaderConstraintTablebHensure_loader_constraint_capacity6MpnVLoaderConstraintEntry_i_v_;
@ -2680,7 +2541,6 @@ text: .text%__1cIRuntime1Tprimitive_arraycopy6FpnIHeapWord_2i_v_;
text: .text%__1cRComputeEntryStackHdo_char6M_v_: generateOopMap.o; text: .text%__1cRComputeEntryStackHdo_char6M_v_: generateOopMap.o;
text: .text%jni_NewDirectByteBuffer; text: .text%jni_NewDirectByteBuffer;
text: .text%lookupDirectBufferClasses: jni.o; text: .text%lookupDirectBufferClasses: jni.o;
text: .text%__1cJlookupOne6FpnHJNIEnv__pkcpnGThread__pnH_jclass__: jni.o;
text: .text%__1cHJNIEnv_JNewObject6MpnH_jclass_pnK_jmethodID_E_pnI_jobject__: jni.o; text: .text%__1cHJNIEnv_JNewObject6MpnH_jclass_pnK_jmethodID_E_pnI_jobject__: jni.o;
text: .text%jni_GetDoubleArrayRegion: jni.o; text: .text%jni_GetDoubleArrayRegion: jni.o;
text: .text%__1cNSignatureInfoJdo_double6M_v_: bytecode.o; text: .text%__1cNSignatureInfoJdo_double6M_v_: bytecode.o;
@ -2701,7 +2561,6 @@ text: .text%JVM_GetCPFieldModifiers;
text: .text%__1cPClassFileParserUverify_constantvalue6MiinSconstantPoolHandle_pnGThread__v_; text: .text%__1cPClassFileParserUverify_constantvalue6MiinSconstantPoolHandle_pnGThread__v_;
text: .text%JVM_MonitorNotify; text: .text%JVM_MonitorNotify;
text: .text%__1cSObjectSynchronizerGnotify6FnGHandle_pnGThread__v_; text: .text%__1cSObjectSynchronizerGnotify6FnGHandle_pnGThread__v_;
text: .text%__1cKValueStackElock6MpnHIRScope_pnLInstruction__i_;
text: .text%__1cKValueStackGunlock6M_i_; text: .text%__1cKValueStackGunlock6M_i_;
text: .text%__1cQMonitorEnterStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_x86.o; text: .text%__1cQMonitorEnterStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_x86.o;
text: .text%__1cNLIR_AssemblerJemit_lock6MpnKLIR_OpLock__v_; text: .text%__1cNLIR_AssemblerJemit_lock6MpnKLIR_OpLock__v_;
@ -2726,8 +2585,6 @@ text: .text%__1cbCOneContigSpaceCardGenerationLused_region6kM_nJMemRegion__;
text: .text%__1cMGenMarkSweepPallocate_stacks6F_v_; text: .text%__1cMGenMarkSweepPallocate_stacks6F_v_;
text: .text%__1cQGenCollectedHeapOgather_scratch6MpnKGeneration_I_pnMScratchBlock__; text: .text%__1cQGenCollectedHeapOgather_scratch6MpnKGeneration_I_pnMScratchBlock__;
text: .text%__1cQDefNewGenerationScontribute_scratch6MrpnMScratchBlock_pnKGeneration_I_v_; text: .text%__1cQDefNewGenerationScontribute_scratch6MrpnMScratchBlock_pnKGeneration_I_v_;
text: .text%__1cRsort_scratch_list6FrpnMScratchBlock__v_: genCollectedHeap.o;
text: .text%__1cVremoveSmallestScratch6FppnMScratchBlock__1_: genCollectedHeap.o;
text: .text%__1cJMarkSweepRFollowRootClosureGdo_oop6MppnHoopDesc__v_: markSweep.o; text: .text%__1cJMarkSweepRFollowRootClosureGdo_oop6MppnHoopDesc__v_: markSweep.o;
text: .text%__1cParrayKlassKlassToop_follow_contents6MpnHoopDesc__v_; text: .text%__1cParrayKlassKlassToop_follow_contents6MpnHoopDesc__v_;
text: .text%__1cLklassVtableToop_follow_contents6M_v_; text: .text%__1cLklassVtableToop_follow_contents6M_v_;
@ -2784,12 +2641,6 @@ text: .text%__1cbCOneContigSpaceCardGenerationWfirst_compaction_space6kM_pnQComp
text: .text%__1cMGenMarkSweepRmark_sweep_phase36Fi_v_; text: .text%__1cMGenMarkSweepRmark_sweep_phase36Fi_v_;
text: .text%__1cUCompactingPermGenGenTpre_adjust_pointers6M_v_; text: .text%__1cUCompactingPermGenGenTpre_adjust_pointers6M_v_;
text: .text%__1cJMarkSweepUAdjustPointerClosureGdo_oop6MppnHoopDesc__v_: markSweep.o; text: .text%__1cJMarkSweepUAdjustPointerClosureGdo_oop6MppnHoopDesc__v_: markSweep.o;
text: .text%__1cJCodeCacheHoops_do6FpnKOopClosure__v_;
text: .text%__1cKBufferBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cSDeoptimizationBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cLRuntimeStubHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cNSafepointBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cHnmethodHoops_do6MpnKOopClosure__v_;
text: .text%__1cJHashtableHoops_do6MpnKOopClosure__v_; text: .text%__1cJHashtableHoops_do6MpnKOopClosure__v_;
text: .text%__1cJMarkSweepMadjust_marks6F_v_; text: .text%__1cJMarkSweepMadjust_marks6F_v_;
text: .text%__1cYGenAdjustPointersClosureNdo_generation6MpnKGeneration__v_: genMarkSweep.o; text: .text%__1cYGenAdjustPointersClosureNdo_generation6MpnKGeneration__v_: genMarkSweep.o;
@ -2840,7 +2691,6 @@ text: .text%Unsafe_CompareAndSwapObject;
text: .text%__1cMLinkResolverbEvtable_index_of_miranda_method6FnLKlassHandle_nMsymbolHandle_2pnGThread__i_; text: .text%__1cMLinkResolverbEvtable_index_of_miranda_method6FnLKlassHandle_nMsymbolHandle_2pnGThread__i_;
text: .text%__1cLklassVtableQindex_of_miranda6MpnNsymbolOopDesc_2_i_; text: .text%__1cLklassVtableQindex_of_miranda6MpnNsymbolOopDesc_2_i_;
text: .text%__1cRPrivilegedElementHoops_do6MpnKOopClosure__v_; text: .text%__1cRPrivilegedElementHoops_do6MpnKOopClosure__v_;
text: .text%__1cFframeRoops_code_blob_do6MpnKOopClosure_pknLRegisterMap__v_;
text: .text%__1cMOopMapStream2t6MpnGOopMap_i_v_; text: .text%__1cMOopMapStream2t6MpnGOopMap_i_v_;
text: .text%__1cQComputeCallStackIdo_float6M_v_: generateOopMap.o; text: .text%__1cQComputeCallStackIdo_float6M_v_: generateOopMap.o;
text: .text%jni_DeleteWeakGlobalRef: jni.o; text: .text%jni_DeleteWeakGlobalRef: jni.o;
@ -2849,7 +2699,6 @@ text: .text%JVM_IsSameClassPackage;
text: .text%__1cNCanonicalizerPdo_MonitorEnter6MpnMMonitorEnter__v_; text: .text%__1cNCanonicalizerPdo_MonitorEnter6MpnMMonitorEnter__v_;
text: .text%__1cLMonitorExitFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cLMonitorExitFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerOdo_MonitorExit6MpnLMonitorExit__v_; text: .text%__1cNCanonicalizerOdo_MonitorExit6MpnLMonitorExit__v_;
text: .text%__1cNAccessMonitorPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorPdo_MonitorEnter6MpnMMonitorEnter__v_; text: .text%__1cQNullCheckVisitorPdo_MonitorEnter6MpnMMonitorEnter__v_;
text: .text%__1cTNullCheckEliminatorUhandle_AccessMonitor6MpnNAccessMonitor__v_; text: .text%__1cTNullCheckEliminatorUhandle_AccessMonitor6MpnNAccessMonitor__v_;
text: .text%__1cQNullCheckVisitorOdo_MonitorExit6MpnLMonitorExit__v_; text: .text%__1cQNullCheckVisitorOdo_MonitorExit6MpnLMonitorExit__v_;
@ -2867,7 +2716,6 @@ text: .text%Unsafe_AllocateInstance;
text: .text%jni_AllocObject: jni.o; text: .text%jni_AllocObject: jni.o;
text: .text%__1cQinstanceRefKlassUoop_oop_iterate_nv_m6MpnHoopDesc_pnQFilteringClosure_nJMemRegion__i_; text: .text%__1cQinstanceRefKlassUoop_oop_iterate_nv_m6MpnHoopDesc_pnQFilteringClosure_nJMemRegion__i_;
text: .text%__1cNCanonicalizerMset_constant6Mi_v_: c1_Canonicalizer.o; text: .text%__1cNCanonicalizerMset_constant6Mi_v_: c1_Canonicalizer.o;
text: .text%__1cJTypeCheckPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cNLIR_AssemblerMcheck_icache6M_i_; text: .text%__1cNLIR_AssemblerMcheck_icache6M_i_;
text: .text%__1cNLIR_AssemblerZjobject2reg_with_patching6MpnMRegisterImpl_pnMCodeEmitInfo__v_; text: .text%__1cNLIR_AssemblerZjobject2reg_with_patching6MpnMRegisterImpl_pnMCodeEmitInfo__v_;
text: .text%__1cIRuntime1Mnew_instance6FpnKJavaThread_pnMklassOopDesc__v_; text: .text%__1cIRuntime1Mnew_instance6FpnKJavaThread_pnMklassOopDesc__v_;
@ -2878,7 +2726,6 @@ text: .text%__1cNFloatConstantQas_FloatConstant6M_p0_: c1_Canonicalizer.o;
text: .text%__1cJFloatTypeMas_FloatType6M_p0_: c1_Canonicalizer.o; text: .text%__1cJFloatTypeMas_FloatType6M_p0_: c1_Canonicalizer.o;
text: .text%__1cRAbstractAssemblerGa_long6Mi_v_; text: .text%__1cRAbstractAssemblerGa_long6Mi_v_;
text: .text%__1cNObjectMonitorGnotify6MpnGThread__v_; text: .text%__1cNObjectMonitorGnotify6MpnGThread__v_;
text: .text%__1cINewArrayPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cIRuntime1Mmonitorenter6FpnKJavaThread_pnHoopDesc_pnPBasicObjectLock__v_; text: .text%__1cIRuntime1Mmonitorenter6FpnKJavaThread_pnHoopDesc_pnPBasicObjectLock__v_;
text: .text%__1cIRuntime1Lmonitorexit6FpnKJavaThread_pnPBasicObjectLock__v_; text: .text%__1cIRuntime1Lmonitorexit6FpnKJavaThread_pnPBasicObjectLock__v_;
text: .text%__1cNVM_DeoptimizeEdoit6M_v_; text: .text%__1cNVM_DeoptimizeEdoit6M_v_;
@ -2887,14 +2734,8 @@ text: .text%__1cHThreadsbFdeoptimized_wrt_marked_nmethods6F_v_;
text: .text%__1cKJavaThreadbFdeoptimized_wrt_marked_nmethods6M_v_; text: .text%__1cKJavaThreadbFdeoptimized_wrt_marked_nmethods6M_v_;
text: .text%__1cJCodeCachebGmake_marked_nmethods_not_entrant6F_v_; text: .text%__1cJCodeCachebGmake_marked_nmethods_not_entrant6F_v_;
text: .text%__1cJCodeCacheNalive_nmethod6FpnICodeBlob__pnHnmethod__; text: .text%__1cJCodeCacheNalive_nmethod6FpnICodeBlob__pnHnmethod__;
text: .text%__1cHnmethodbAmake_not_entrant_or_zombie6Mi_v_;
text: .text%__1cKNativeJumpUpatch_verified_entry6FpC11_v_; text: .text%__1cKNativeJumpUpatch_verified_entry6FpC11_v_;
text: .text%__1cHnmethodVmark_as_seen_on_stack6M_v_; text: .text%__1cHnmethodVmark_as_seen_on_stack6M_v_;
text: .text%__1cHThreadsLnmethods_do6F_v_;
text: .text%__1cKJavaThreadLnmethods_do6M_v_;
text: .text%__1cGThreadLnmethods_do6M_v_;
text: .text%__1cFframeLnmethods_do6M_v_;
text: .text%__1cFframeVnmethods_code_blob_do6M_v_;
text: .text%__1cONMethodSweeperPprocess_nmethod6FpnHnmethod__v_; text: .text%__1cONMethodSweeperPprocess_nmethod6FpnHnmethod__v_;
text: .text%__1cHnmethodVcleanup_inline_caches6M_v_; text: .text%__1cHnmethodVcleanup_inline_caches6M_v_;
text: .text%__1cKCompiledIC2t6MpnKRelocation__v_; text: .text%__1cKCompiledIC2t6MpnKRelocation__v_;
@ -2902,14 +2743,9 @@ text: .text%JVM_HoldsLock;
text: .text%__1cTMaskFillerForNativeIpass_int6M_v_: oopMapCache.o; text: .text%__1cTMaskFillerForNativeIpass_int6M_v_: oopMapCache.o;
text: .text%__1cNSharedRuntimeDf2l6Ff_x_; text: .text%__1cNSharedRuntimeDf2l6Ff_x_;
text: .text%__1cMGraphBuilderKcompare_op6MpnJValueType_nJBytecodesECode__v_; text: .text%__1cMGraphBuilderKcompare_op6MpnJValueType_nJBytecodesECode__v_;
text: .text%__1cJCompareOpFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerMdo_CompareOp6MpnJCompareOp__v_; text: .text%__1cNCanonicalizerMdo_CompareOp6MpnJCompareOp__v_;
text: .text%__1cJCompareOpEhash6kM_i_: c1_Instruction.o;
text: .text%__1cJCompareOpEname6kM_pkc_: c1_Instruction.o;
text: .text%__1cJCompareOpMas_CompareOp6M_p0_: c1_Instruction.o;
text: .text%__1cHnmethodSflush_dependencies6MpnRBoolObjectClosure__v_; text: .text%__1cHnmethodSflush_dependencies6MpnRBoolObjectClosure__v_;
text: .text%__1cNinstanceKlassYremove_dependent_nmethod6MpnHnmethod__v_; text: .text%__1cNinstanceKlassYremove_dependent_nmethod6MpnHnmethod__v_;
text: .text%__1cFVTuneOdelete_nmethod6FpnHnmethod__v_;
text: .text%__1cQPlaceholderEntryHoops_do6MpnKOopClosure__v_; text: .text%__1cQPlaceholderEntryHoops_do6MpnKOopClosure__v_;
text: .text%__1cHnmethodFflush6M_v_; text: .text%__1cHnmethodFflush6M_v_;
text: .text%__1cICodeBlobFflush6M_v_; text: .text%__1cICodeBlobFflush6M_v_;
@ -2951,9 +2787,7 @@ text: .text%__1cNCanonicalizerPdo_UnsafeGetRaw6MpnMUnsafeGetRaw__v_;
text: .text%__1cMGraphBuilderNlookup_switch6M_v_; text: .text%__1cMGraphBuilderNlookup_switch6M_v_;
text: .text%__1cMLookupSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cMLookupSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerPdo_LookupSwitch6MpnMLookupSwitch__v_; text: .text%__1cNCanonicalizerPdo_LookupSwitch6MpnMLookupSwitch__v_;
text: .text%__1cMUnsafePutRawPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorPdo_UnsafePutRaw6MpnMUnsafePutRaw__v_; text: .text%__1cQNullCheckVisitorPdo_UnsafePutRaw6MpnMUnsafePutRaw__v_;
text: .text%__1cLUnsafeRawOpPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorPdo_UnsafeGetRaw6MpnMUnsafeGetRaw__v_; text: .text%__1cQNullCheckVisitorPdo_UnsafeGetRaw6MpnMUnsafeGetRaw__v_;
text: .text%__1cQNullCheckVisitorPdo_LookupSwitch6MpnMLookupSwitch__v_; text: .text%__1cQNullCheckVisitorPdo_LookupSwitch6MpnMLookupSwitch__v_;
text: .text%__1cNSharedRuntimeEldiv6Fxx_x_; text: .text%__1cNSharedRuntimeEldiv6Fxx_x_;

View File

@ -135,7 +135,6 @@ text: .text%__1cNThreadServiceEinit6F_v_;
text: .text%__1cPPerfDataManagerTcreate_long_counter6FnJCounterNS_pkcnIPerfDataFUnits_xpnGThread__pnPPerfLongCounter__; text: .text%__1cPPerfDataManagerTcreate_long_counter6FnJCounterNS_pkcnIPerfDataFUnits_xpnGThread__pnPPerfLongCounter__;
text: .text%__1cORuntimeServiceEinit6F_v_; text: .text%__1cORuntimeServiceEinit6F_v_;
text: .text%__1cTClassLoadingServiceEinit6F_v_; text: .text%__1cTClassLoadingServiceEinit6F_v_;
text: .text%__1cKvtune_init6F_v_;
text: .text%__1cObytecodes_init6F_v_; text: .text%__1cObytecodes_init6F_v_;
text: .text%__1cJBytecodesKinitialize6F_v_; text: .text%__1cJBytecodesKinitialize6F_v_;
text: .text%__1cJBytecodesNpd_initialize6F_v_; text: .text%__1cJBytecodesNpd_initialize6F_v_;
@ -181,9 +180,7 @@ text: .text%__1cKMemoryPoolYrecord_peak_memory_usage6M_v_;
text: .text%__1cMCodeHeapPoolQget_memory_usage6M_nLMemoryUsage__; text: .text%__1cMCodeHeapPoolQget_memory_usage6M_nLMemoryUsage__;
text: .text%__1cMCodeHeapPoolNused_in_bytes6M_I_: memoryPool.o; text: .text%__1cMCodeHeapPoolNused_in_bytes6M_I_: memoryPool.o;
text: .text%__1cICodeHeapSallocated_capacity6kM_I_; text: .text%__1cICodeHeapSallocated_capacity6kM_I_;
text: .text%__1cKMemoryPoolImax_size6kM_I_: memoryPool.o;
text: .text%__1cXresource_allocate_bytes6FI_pc_; text: .text%__1cXresource_allocate_bytes6FI_pc_;
text: .text%__1cKCodeBuffer2t6MpCi_v_;
text: .text%__1cRAbstractAssembler2t6MpnKCodeBuffer__v_; text: .text%__1cRAbstractAssembler2t6MpnKCodeBuffer__v_;
text: .text%__1cTICacheStubGeneratorVgenerate_icache_flush6MppFpCii_i_v_; text: .text%__1cTICacheStubGeneratorVgenerate_icache_flush6MppFpCii_i_v_;
text: .text%__1cMStubCodeMark2t6MpnRStubCodeGenerator_pkc4_v_; text: .text%__1cMStubCodeMark2t6MpnRStubCodeGenerator_pkc4_v_;
@ -194,7 +191,6 @@ text: .text%__1cMStubCodeMark2T6M_v_;
text: .text%__1cRAbstractAssemblerFflush6M_v_; text: .text%__1cRAbstractAssemblerFflush6M_v_;
text: .text%__1cOAbstractICacheQinvalidate_range6FpCi_v_; text: .text%__1cOAbstractICacheQinvalidate_range6FpCi_v_;
text: .text%__1cRStubCodeGeneratorLstub_epilog6MpnMStubCodeDesc__v_; text: .text%__1cRStubCodeGeneratorLstub_epilog6MpnMStubCodeDesc__v_;
text: .text%__1cFVTuneNregister_stub6FpkcpC3_v_;
text: .text%__1cFForteNregister_stub6FpkcpC3_v_; text: .text%__1cFForteNregister_stub6FpkcpC3_v_;
text: .text%__1cPVM_Version_init6F_v_; text: .text%__1cPVM_Version_init6F_v_;
text: .text%jio_snprintf; text: .text%jio_snprintf;
@ -314,10 +310,8 @@ text: .text%__1cLReadClosureGdo_ptr6MppnIHeapWord__v_: restore.o;
text: .text%__1cLReadClosureGdo_ptr6Mppv_v_: restore.o; text: .text%__1cLReadClosureGdo_ptr6Mppv_v_: restore.o;
text: .text%__1cLReadClosureJdo_size_t6MpI_v_: restore.o; text: .text%__1cLReadClosureJdo_size_t6MpI_v_: restore.o;
text: .text%__1cLReadClosureGdo_oop6MppnHoopDesc__v_: restore.o; text: .text%__1cLReadClosureGdo_oop6MppnHoopDesc__v_: restore.o;
text: .text%__1cJCodeCacheHoops_do6FpnKOopClosure__v_;
text: .text%__1cICodeHeapLfirst_block6kM_pnJHeapBlock__; text: .text%__1cICodeHeapLfirst_block6kM_pnJHeapBlock__;
text: .text%__1cICodeHeapJnext_free6kMpnJHeapBlock__pv_; text: .text%__1cICodeHeapJnext_free6kMpnJHeapBlock__pv_;
text: .text%__1cKBufferBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cICodeHeapLblock_start6kMpv_pnJHeapBlock__; text: .text%__1cICodeHeapLblock_start6kMpv_pnJHeapBlock__;
text: .text%__1cICodeHeapKfind_start6kMpv_1_; text: .text%__1cICodeHeapKfind_start6kMpv_1_;
text: .text%__1cICodeHeapKnext_block6kMpnJHeapBlock__2_; text: .text%__1cICodeHeapKnext_block6kMpnJHeapBlock__2_;
@ -334,7 +328,6 @@ text: .text%__1cbCAbstractInterpreterGenerator2t6MpnJStubQdDueue__v_;
text: .text%__1cbCAbstractInterpreterGeneratorMgenerate_all6M_v_; text: .text%__1cbCAbstractInterpreterGeneratorMgenerate_all6M_v_;
text: .text%__1cJStubQdDueueHrequest6Mi_pnEStub__; text: .text%__1cJStubQdDueueHrequest6Mi_pnEStub__;
text: .text%__1cJStubQdDueueGcommit6Mi_v_; text: .text%__1cJStubQdDueueGcommit6Mi_v_;
text: .text%__1cZInterpreterMacroAssemblerbAget_cache_and_index_at_bcp6MpnMRegisterImpl_2i_v_;
text: .text%__1cZInterpreterMacroAssemblerZget_2_byte_integer_at_bcp6MipnMRegisterImpl_2n0ALsignedOrNot_n0AKsetCCOrNot__v_; text: .text%__1cZInterpreterMacroAssemblerZget_2_byte_integer_at_bcp6MipnMRegisterImpl_2n0ALsignedOrNot_n0AKsetCCOrNot__v_;
text: .text%__1cZInterpreterMacroAssemblerNdispatch_next6MnITosState_i_v_; text: .text%__1cZInterpreterMacroAssemblerNdispatch_next6MnITosState_i_v_;
text: .text%__1cOMacroAssemblerKverify_FPU6Mipkc_v_; text: .text%__1cOMacroAssemblerKverify_FPU6Mipkc_v_;
@ -357,7 +350,6 @@ text: .text%__1cZInterpreterMacroAssemblerNunlock_object6MpnMRegisterImpl__v_;
text: .text%__1cQRelocationHolderEplus6kMi_0_; text: .text%__1cQRelocationHolderEplus6kMi_0_;
text: .text%__1cOMacroAssemblerMcall_VM_leaf6MpnMRegisterImpl_pC222_v_; text: .text%__1cOMacroAssemblerMcall_VM_leaf6MpnMRegisterImpl_pC222_v_;
text: .text%__1cOMacroAssemblerNset_vm_result6MpnMRegisterImpl__v_; text: .text%__1cOMacroAssemblerNset_vm_result6MpnMRegisterImpl__v_;
text: .text%__1cZInterpreterMacroAssemblerSsuper_call_VM_leaf6MpnMRegisterImpl_pC2_v_;
text: .text%__1cOMacroAssemblerRcall_VM_leaf_base6MpnMRegisterImpl_pCi_v_; text: .text%__1cOMacroAssemblerRcall_VM_leaf_base6MpnMRegisterImpl_pCi_v_;
text: .text%__1cbCAbstractInterpreterGeneratorVgenerate_method_entry6MnTAbstractInterpreterKMethodKind__pC_; text: .text%__1cbCAbstractInterpreterGeneratorVgenerate_method_entry6MnTAbstractInterpreterKMethodKind__pC_;
text: .text%__1cUInterpreterGeneratorVgenerate_counter_incr6MpnFLabel_22_v_; text: .text%__1cUInterpreterGeneratorVgenerate_counter_incr6MpnFLabel_22_v_;
@ -513,7 +505,6 @@ text: .text%__1cZInterpreterMacroAssemblerWprofile_switch_default6MpnMRegisterIm
text: .text%__1cNTemplateTableMlookupswitch6F_v_; text: .text%__1cNTemplateTableMlookupswitch6F_v_;
text: .text%__1cNTemplateTableH_return6FnITosState__v_; text: .text%__1cNTemplateTableH_return6FnITosState__v_;
text: .text%__1cNTemplateTableJgetstatic6Fi_v_; text: .text%__1cNTemplateTableJgetstatic6Fi_v_;
text: .text%__1cNTemplateTableXresolve_cache_and_index6FipnMRegisterImpl_2_v_;
text: .text%__1cNTemplateTableJputstatic6Fi_v_; text: .text%__1cNTemplateTableJputstatic6Fi_v_;
text: .text%__1cNTemplateTableIgetfield6Fi_v_; text: .text%__1cNTemplateTableIgetfield6Fi_v_;
text: .text%__1cOMacroAssemblerKnull_check6MpnMRegisterImpl_i_v_; text: .text%__1cOMacroAssemblerKnull_check6MpnMRegisterImpl_i_v_;
@ -521,7 +512,6 @@ text: .text%__1cNTemplateTableIputfield6Fi_v_;
text: .text%__1cNTemplateTableNinvokevirtual6Fi_v_; text: .text%__1cNTemplateTableNinvokevirtual6Fi_v_;
text: .text%__1cNTemplateTableTinvokevfinal_helper6FpnMRegisterImpl_2_v_; text: .text%__1cNTemplateTableTinvokevfinal_helper6FpnMRegisterImpl_2_v_;
text: .text%__1cZInterpreterMacroAssemblerSprofile_final_call6MpnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerSprofile_final_call6MpnMRegisterImpl__v_;
text: .text%__1cZInterpreterMacroAssemblerUprofile_virtual_call6MpnMRegisterImpl_2_v_;
text: .text%__1cNTemplateTableUgenerate_vtable_call6FpnMRegisterImpl_22_v_; text: .text%__1cNTemplateTableUgenerate_vtable_call6FpnMRegisterImpl_22_v_;
text: .text%__1cNTemplateTableNinvokespecial6Fi_v_; text: .text%__1cNTemplateTableNinvokespecial6Fi_v_;
text: .text%__1cZInterpreterMacroAssemblerMprofile_call6MpnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerMprofile_call6MpnMRegisterImpl__v_;
@ -629,8 +619,6 @@ text: .text%__1cQjni_handles_init6F_v_;
text: .text%__1cOvmStructs_init6F_v_; text: .text%__1cOvmStructs_init6F_v_;
text: .text%__1cMRegisterImplEname6kM_pkc_; text: .text%__1cMRegisterImplEname6kM_pkc_;
text: .text%__1cRFloatRegisterImplEname6kM_pkc_; text: .text%__1cRFloatRegisterImplEname6kM_pkc_;
text: .text%__1cIFrameMapEinit6F_v_;
text: .text%__1cIRuntime1Kinitialize6F_v_;
text: .text%__1cIRuntime1Ninitialize_pd6F_v_; text: .text%__1cIRuntime1Ninitialize_pd6F_v_;
text: .text%__1cNSharedRuntimeTgenerate_deopt_blob6F_v_; text: .text%__1cNSharedRuntimeTgenerate_deopt_blob6F_v_;
text: .text%__1cOMacroAssemblerZtotal_frame_size_in_bytes6Mi_i_; text: .text%__1cOMacroAssemblerZtotal_frame_size_in_bytes6Mi_i_;
@ -689,7 +677,6 @@ text: .text%__1cEUTF8Sconvert_to_unicode6FpkcpHi_v_;
text: .text%__1cNinstanceKlassRallocate_instance6MpnGThread__pnPinstanceOopDesc__; text: .text%__1cNinstanceKlassRallocate_instance6MpnGThread__pnPinstanceOopDesc__;
text: .text%__1cTjava_lang_ThrowableLset_message6FpnHoopDesc_2_v_; text: .text%__1cTjava_lang_ThrowableLset_message6FpnHoopDesc_2_v_;
text: .text%__1cMNativeLookupTbase_library_lookup6Fpkc22_pC_; text: .text%__1cMNativeLookupTbase_library_lookup6Fpkc22_pC_;
text: .text%__1cKoopFactoryKnew_symbol6FpkcipnGThread__pnNsymbolOopDesc__;
text: .text%__1cLSymbolTableGlookup6FpkcipnGThread__pnNsymbolOopDesc__; text: .text%__1cLSymbolTableGlookup6FpkcipnGThread__pnNsymbolOopDesc__;
text: .text%__1cNinstanceKlassWuncached_lookup_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__; text: .text%__1cNinstanceKlassWuncached_lookup_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cMstringStream2t6MI_v_; text: .text%__1cMstringStream2t6MI_v_;
@ -788,8 +775,6 @@ text: .text%__1cXSignatureHandlerLibraryOpd_set_handler6FpC_v_;
text: .text%__1cNmethodOopDescVset_signature_handler6MpC_v_; text: .text%__1cNmethodOopDescVset_signature_handler6MpC_v_;
text: .text%jni_RegisterNatives: jni.o; text: .text%jni_RegisterNatives: jni.o;
text: .text%__1cPjava_lang_ClassLas_klassOop6FpnHoopDesc__pnMklassOopDesc__; text: .text%__1cPjava_lang_ClassLas_klassOop6FpnHoopDesc__pnMklassOopDesc__;
text: .text%__1cLSymbolTableFprobe6Fpkci_pnNsymbolOopDesc__;
text: .text%__1cFKlassNlookup_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cOJNIHandleBlockNrelease_block6Fp0pnGThread__v_; text: .text%__1cOJNIHandleBlockNrelease_block6Fp0pnGThread__v_;
text: .text%__1cSObjectSynchronizerJnotifyall6FnGHandle_pnGThread__v_; text: .text%__1cSObjectSynchronizerJnotifyall6FnGHandle_pnGThread__v_;
text: .text%__1cSInterpreterRuntimeJanewarray6FpnKJavaThread_pnTconstantPoolOopDesc_ii_v_; text: .text%__1cSInterpreterRuntimeJanewarray6FpnKJavaThread_pnTconstantPoolOopDesc_ii_v_;
@ -829,7 +814,6 @@ text: .text%__1cbBcreate_initial_thread_group6FpnGThread__nGHandle__: thread.o;
text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMsymbolHandle_5pnGThread__v_; text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMsymbolHandle_5pnGThread__v_;
text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMsymbolHandle_533pnGThread__v_; text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMsymbolHandle_533pnGThread__v_;
text: .text%__1cNmethodOopDescIbci_from6kMpC_i_; text: .text%__1cNmethodOopDescIbci_from6kMpC_i_;
text: .text%__1cPBytecode_invokeJsignature6kM_pnNsymbolOopDesc__;
text: .text%__1cNmethodOopDescIbcp_from6kMi_pC_; text: .text%__1cNmethodOopDescIbcp_from6kMi_pC_;
text: .text%__1cFframebGinterpreter_callee_receiver_addr6MnMsymbolHandle__ppnHoopDesc__; text: .text%__1cFframebGinterpreter_callee_receiver_addr6MnMsymbolHandle__ppnHoopDesc__;
text: .text%__1cRSignatureIteratorSiterate_parameters6M_v_; text: .text%__1cRSignatureIteratorSiterate_parameters6M_v_;
@ -983,7 +967,6 @@ text: .text%jni_GetStringUTFLength: jni.o;
text: .text%__1cQjava_lang_StringLutf8_length6FpnHoopDesc__i_; text: .text%__1cQjava_lang_StringLutf8_length6FpnHoopDesc__i_;
text: .text%__1cHUNICODELutf8_length6FpHi_i_; text: .text%__1cHUNICODELutf8_length6FpHi_i_;
text: .text%jni_GetStringLength: jni.o; text: .text%jni_GetStringLength: jni.o;
text: .text%__1cQjava_lang_StringGlength6FpnHoopDesc__i_;
text: .text%jni_GetStringUTFRegion: jni.o; text: .text%jni_GetStringUTFRegion: jni.o;
text: .text%__1cQjava_lang_StringOas_utf8_string6FpnHoopDesc_ii_pc_; text: .text%__1cQjava_lang_StringOas_utf8_string6FpnHoopDesc_ii_pc_;
text: .text%JVM_FindClassFromClassLoader; text: .text%JVM_FindClassFromClassLoader;
@ -1022,7 +1005,6 @@ text: .text%__1cbDjava_lang_reflect_ConstructorEslot6FpnHoopDesc__i_;
text: .text%__1cbIjava_lang_reflect_AccessibleObjectIoverride6FpnHoopDesc__C_; text: .text%__1cbIjava_lang_reflect_AccessibleObjectIoverride6FpnHoopDesc__C_;
text: .text%__1cbDjava_lang_reflect_ConstructorPparameter_types6FpnHoopDesc__2_; text: .text%__1cbDjava_lang_reflect_ConstructorPparameter_types6FpnHoopDesc__2_;
text: .text%__1cLClassLoaderOload_classfile6FnMsymbolHandle_pnGThread__nTinstanceKlassHandle__; text: .text%__1cLClassLoaderOload_classfile6FnMsymbolHandle_pnGThread__nTinstanceKlassHandle__;
text: .text%__1cFVTuneQstart_class_load6F_v_;
text: .text%__1cJEventMark2t6MpkcE_v_: classLoader.o; text: .text%__1cJEventMark2t6MpkcE_v_: classLoader.o;
text: .text%__1cSThreadProfilerMark2t6Mn0AGRegion__v_; text: .text%__1cSThreadProfilerMark2t6Mn0AGRegion__v_;
text: .text%__1cRClassPathZipEntryLopen_stream6Mpkc_pnPClassFileStream__; text: .text%__1cRClassPathZipEntryLopen_stream6Mpkc_pnPClassFileStream__;
@ -1060,8 +1042,6 @@ text: .text%__1cPClassFileParserMsort_methods6MnOobjArrayHandle_111pnGThread__nP
text: .text%method_compare: methodOop.o; text: .text%method_compare: methodOop.o;
text: .text%__1cLklassItableTcompute_itable_size6FnOobjArrayHandle__i_; text: .text%__1cLklassItableTcompute_itable_size6FnOobjArrayHandle__i_;
text: .text%__1cUvisit_all_interfaces6FpnPobjArrayOopDesc_pnXInterfaceVisiterClosure__v_; text: .text%__1cUvisit_all_interfaces6FpnPobjArrayOopDesc_pnXInterfaceVisiterClosure__v_;
text: .text%__1cKoopFactoryRnew_instanceKlass6FiiiinNReferenceType_pnGThread__pnMklassOopDesc__;
text: .text%__1cSinstanceKlassKlassXallocate_instance_klass6MiiiinNReferenceType_pnGThread__pnMklassOopDesc__;
text: .text%__1cNinstanceKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceKlass.o; text: .text%__1cNinstanceKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceKlass.o;
text: .text%__1cNinstanceKlassOset_alloc_size6MI_v_: instanceKlass.o; text: .text%__1cNinstanceKlassOset_alloc_size6MI_v_: instanceKlass.o;
text: .text%__1cNinstanceKlassQinit_implementor6M_v_; text: .text%__1cNinstanceKlassQinit_implementor6M_v_;
@ -1077,7 +1057,6 @@ text: .text%__1cPClassFileParserVset_precomputed_flags6MnTinstanceKlassHandle__v
text: .text%__1cPClassFileParserbCcheck_super_interface_access6FnTinstanceKlassHandle_pnGThread__v_; text: .text%__1cPClassFileParserbCcheck_super_interface_access6FnTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cPClassFileParserbBcheck_final_method_override6FnTinstanceKlassHandle_pnGThread__v_; text: .text%__1cPClassFileParserbBcheck_final_method_override6FnTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cSThreadProfilerMark2T6M_v_; text: .text%__1cSThreadProfilerMark2T6M_v_;
text: .text%__1cFVTuneOend_class_load6F_v_;
text: .text%__1cIRewriterHrewrite6FnTinstanceKlassHandle_pnGThread__v_; text: .text%__1cIRewriterHrewrite6FnTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cYconstantPoolCacheOopDescKinitialize6MrnIintArray__v_; text: .text%__1cYconstantPoolCacheOopDescKinitialize6MrnIintArray__v_;
text: .text%JVM_MaxMemory; text: .text%JVM_MaxMemory;
@ -1091,15 +1070,11 @@ text: .text%Unsafe_FreeMemory;
text: .text%__1cNSignatureInfoIdo_float6M_v_: bytecode.o; text: .text%__1cNSignatureInfoIdo_float6M_v_: bytecode.o;
text: .text%jni_NewObjectV: jni.o; text: .text%jni_NewObjectV: jni.o;
text: .text%jni_GetStringRegion: jni.o; text: .text%jni_GetStringRegion: jni.o;
text: .text%__1cQjava_lang_StringGoffset6FpnHoopDesc__i_;
text: .text%__1cQjava_lang_StringFvalue6FpnHoopDesc__pnQtypeArrayOopDesc__;
text: .text%jni_GetObjectField: jni.o; text: .text%jni_GetObjectField: jni.o;
text: .text%jni_GetStringCritical: jni.o; text: .text%jni_GetStringCritical: jni.o;
text: .text%jni_ReleaseStringCritical: jni.o; text: .text%jni_ReleaseStringCritical: jni.o;
text: .text%__1cQSimpleCompPolicyXmethod_invocation_event6MnMmethodHandle_pnGThread__v_; text: .text%__1cQSimpleCompPolicyXmethod_invocation_event6MnMmethodHandle_pnGThread__v_;
text: .text%__1cRInvocationCounterJset_carry6M_v_; text: .text%__1cRInvocationCounterJset_carry6M_v_;
text: .text%__1cNCompileBrokerOcompile_method6FnMmethodHandle_i1ipkcpnGThread__pnHnmethod__;
text: .text%__1cQSimpleCompPolicyRcompilation_level6MnMmethodHandle_i_i_;
text: .text%__1cNinstanceKlassUfind_interface_field6kMpnNsymbolOopDesc_2pnPfieldDescriptor__pnMklassOopDesc__; text: .text%__1cNinstanceKlassUfind_interface_field6kMpnNsymbolOopDesc_2pnPfieldDescriptor__pnMklassOopDesc__;
text: .text%JVM_LoadLibrary; text: .text%JVM_LoadLibrary;
text: .text%JVM_FindLibraryEntry; text: .text%JVM_FindLibraryEntry;
@ -1153,7 +1128,6 @@ text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMs
text: .text%__1cCosOsignal_init_pd6F_v_; text: .text%__1cCosOsignal_init_pd6F_v_;
text: .text%__1cQjava_lang_ThreadKset_daemon6FpnHoopDesc__v_; text: .text%__1cQjava_lang_ThreadKset_daemon6FpnHoopDesc__v_;
text: .text%__1cICompiler2t6M_v_; text: .text%__1cICompiler2t6M_v_;
text: .text%__1cNCompileBrokerVinit_compiler_threads6Fi_v_;
text: .text%__1cQCompilerCounters2t6MpkcipnGThread__v_; text: .text%__1cQCompilerCounters2t6MpkcipnGThread__v_;
text: .text%__1cNCompileBrokerUmake_compiler_thread6FpkcpnMCompileQdDueue_pnQCompilerCounters_pnGThread__pnOCompilerThread__; text: .text%__1cNCompileBrokerUmake_compiler_thread6FpkcpnMCompileQdDueue_pnQCompilerCounters_pnGThread__pnOCompilerThread__;
text: .text%__1cTsignal_thread_entry6FpnKJavaThread_pnGThread__v_: os.o; text: .text%__1cTsignal_thread_entry6FpnKJavaThread_pnGThread__v_: os.o;
@ -1188,7 +1162,6 @@ text: .text%__1cRLowMemoryDetectorbGlow_memory_detector_thread_entry6FpnKJavaThr
text: .text%__1cKPerfStringKset_string6Mpkc_v_; text: .text%__1cKPerfStringKset_string6Mpkc_v_;
text: .text%__1cPciObjectFactory2t6MpnFArena_i_v_; text: .text%__1cPciObjectFactory2t6MpnFArena_i_v_;
text: .text%__1cPciObjectFactoryTinit_shared_objects6M_v_; text: .text%__1cPciObjectFactoryTinit_shared_objects6M_v_;
text: .text%__1cIciSymbol2t6MnMsymbolHandle__v_;
text: .text%__1cIciObject2t6MnGHandle__v_; text: .text%__1cIciObject2t6MnGHandle__v_;
text: .text%__1cIciObjectJset_ident6MI_v_; text: .text%__1cIciObjectJset_ident6MI_v_;
text: .text%__1cGciType2t6MnJBasicType__v_; text: .text%__1cGciType2t6MnJBasicType__v_;
@ -1267,7 +1240,6 @@ text: .text%__1cOGenerateOopMapYrewrite_refval_conflicts6M_v_;
text: .text%__1cOGenerateOopMapNreport_result6M_v_; text: .text%__1cOGenerateOopMapNreport_result6M_v_;
text: .text%__1cLCompilationJbuild_hir6M_v_; text: .text%__1cLCompilationJbuild_hir6M_v_;
text: .text%__1cCIR2t6MpnLCompilation_pnIciMethod_i_v_; text: .text%__1cCIR2t6MpnLCompilation_pnIciMethod_i_v_;
text: .text%__1cJValueTypeKinitialize6F_v_;
text: .text%__1cMciNullObjectEmake6F_p0_; text: .text%__1cMciNullObjectEmake6F_p0_;
text: .text%__1cMGraphBuilderKinitialize6F_v_; text: .text%__1cMGraphBuilderKinitialize6F_v_;
text: .text%__1cJXHandlers2t6MpnIciMethod__v_; text: .text%__1cJXHandlers2t6MpnIciMethod__v_;
@ -1275,13 +1247,11 @@ text: .text%__1cIciMethodJload_code6M_v_;
text: .text%__1cLCompilationTdebug_info_recorder6kM_pnYDebugInformationRecorder__; text: .text%__1cLCompilationTdebug_info_recorder6kM_pnYDebugInformationRecorder__;
text: .text%__1cHIRScopeLbuild_graph6MpnLCompilation_i_pnKBlockBegin__; text: .text%__1cHIRScopeLbuild_graph6MpnLCompilation_i_pnKBlockBegin__;
text: .text%__1cQBlockListBuilderLset_leaders6M_v_; text: .text%__1cQBlockListBuilderLset_leaders6M_v_;
text: .text%__1cKValueStack2t6MpnHIRScope_ii_v_;
text: .text%__1cLciSignatureHtype_at6kMi_pnGciType__; text: .text%__1cLciSignatureHtype_at6kMi_pnGciType__;
text: .text%__1cMas_ValueType6FnJBasicType__pnJValueType__; text: .text%__1cMas_ValueType6FnJBasicType__pnJValueType__;
text: .text%__1cIValueMap2t6M_v_; text: .text%__1cIValueMap2t6M_v_;
text: .text%__1cNResourceArrayGexpand6MIiri_v_; text: .text%__1cNResourceArrayGexpand6MIiri_v_;
text: .text%__1cIValueMapIkill_all6M_v_; text: .text%__1cIValueMapIkill_all6M_v_;
text: .text%__1cKValueStackEcopy6M_p0_;
text: .text%__1cMGraphBuilderbBiterate_bytecodes_for_block6Mi_pnIBlockEnd__; text: .text%__1cMGraphBuilderbBiterate_bytecodes_for_block6Mi_pnIBlockEnd__;
text: .text%__1cMGraphBuilderJScopeDataIblock_at6Mi_pnKBlockBegin__; text: .text%__1cMGraphBuilderJScopeDataIblock_at6Mi_pnKBlockBegin__;
text: .text%__1cMGraphBuilderKload_local6MpnJValueType_i_v_; text: .text%__1cMGraphBuilderKload_local6MpnJValueType_i_v_;
@ -1322,7 +1292,6 @@ text: .text%__1cPciInstanceKlassLfield_cache6M_pnTciConstantPoolCache__;
text: .text%__1cHciField2t6MpnPciInstanceKlass_i_v_; text: .text%__1cHciField2t6MpnPciInstanceKlass_i_v_;
text: .text%__1cHciFieldPinitialize_from6MpnPfieldDescriptor__v_; text: .text%__1cHciFieldPinitialize_from6MpnPfieldDescriptor__v_;
text: .text%__1cTconstantPoolOopDescbCklass_ref_at_if_loaded_check6FnSconstantPoolHandle_ipnGThread__pnMklassOopDesc__; text: .text%__1cTconstantPoolOopDescbCklass_ref_at_if_loaded_check6FnSconstantPoolHandle_ipnGThread__pnMklassOopDesc__;
text: .text%__1cKValueStackKcopy_locks6M_p0_;
text: .text%__1cJLoadFieldFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cJLoadFieldFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerMdo_LoadField6MpnJLoadField__v_; text: .text%__1cNCanonicalizerMdo_LoadField6MpnJLoadField__v_;
text: .text%__1cJLoadFieldEhash6kM_i_: c1_Instruction.o; text: .text%__1cJLoadFieldEhash6kM_i_: c1_Instruction.o;
@ -1365,27 +1334,18 @@ text: .text%__1cTNullCheckEliminatorLiterate_one6MpnKBlockBegin__v_;
text: .text%__1cGBitMapIset_from6M0_v_; text: .text%__1cGBitMapIset_from6M0_v_;
text: .text%__1cQNullCheckVisitorNdo_BlockBegin6MpnKBlockBegin__v_; text: .text%__1cQNullCheckVisitorNdo_BlockBegin6MpnKBlockBegin__v_;
text: .text%__1cQNullCheckVisitorHdo_Base6MpnEBase__v_; text: .text%__1cQNullCheckVisitorHdo_Base6MpnEBase__v_;
text: .text%__1cKStateSplitPinput_values_do6MpFppnLInstruction__v_v_: c1_Canonicalizer.o;
text: .text%__1cEGotoFvisit6MpnSInstructionVisitor__v_: c1_Canonicalizer.o; text: .text%__1cEGotoFvisit6MpnSInstructionVisitor__v_: c1_Canonicalizer.o;
text: .text%__1cQNullCheckVisitorHdo_Goto6MpnEGoto__v_; text: .text%__1cQNullCheckVisitorHdo_Goto6MpnEGoto__v_;
text: .text%__1cCIfPinput_values_do6MpFppnLInstruction__v_v_: c1_Canonicalizer.o;
text: .text%__1cTNullCheckEliminatorIdo_value6FppnLInstruction__v_;
text: .text%__1cFLocalPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cFLocalFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cFLocalFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorIdo_Local6MpnFLocal__v_; text: .text%__1cQNullCheckVisitorIdo_Local6MpnFLocal__v_;
text: .text%__1cQNullCheckVisitorFdo_If6MpnCIf__v_; text: .text%__1cQNullCheckVisitorFdo_If6MpnCIf__v_;
text: .text%__1cGReturnPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cIConstantPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorLdo_Constant6MpnIConstant__v_; text: .text%__1cQNullCheckVisitorLdo_Constant6MpnIConstant__v_;
text: .text%__1cQNullCheckVisitorJdo_Return6MpnGReturn__v_; text: .text%__1cQNullCheckVisitorJdo_Return6MpnGReturn__v_;
text: .text%__1cJTypeCheckPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorNdo_InstanceOf6MpnKInstanceOf__v_; text: .text%__1cQNullCheckVisitorNdo_InstanceOf6MpnKInstanceOf__v_;
text: .text%__1cQNullCheckVisitorMdo_CheckCast6MpnJCheckCast__v_; text: .text%__1cQNullCheckVisitorMdo_CheckCast6MpnJCheckCast__v_;
text: .text%__1cLAccessFieldPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorMdo_LoadField6MpnJLoadField__v_; text: .text%__1cQNullCheckVisitorMdo_LoadField6MpnJLoadField__v_;
text: .text%__1cTNullCheckEliminatorShandle_AccessField6MpnLAccessField__v_; text: .text%__1cTNullCheckEliminatorShandle_AccessField6MpnLAccessField__v_;
text: .text%__1cQNullCheckVisitorPdo_ArithmeticOp6MpnMArithmeticOp__v_; text: .text%__1cQNullCheckVisitorPdo_ArithmeticOp6MpnMArithmeticOp__v_;
text: .text%__1cLAccessArrayPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorOdo_ArrayLength6MpnLArrayLength__v_; text: .text%__1cQNullCheckVisitorOdo_ArrayLength6MpnLArrayLength__v_;
text: .text%__1cTNullCheckEliminatorShandle_ArrayLength6MpnLArrayLength__v_; text: .text%__1cTNullCheckEliminatorShandle_ArrayLength6MpnLArrayLength__v_;
text: .text%__1cQNullCheckVisitorOdo_LoadIndexed6MpnLLoadIndexed__v_; text: .text%__1cQNullCheckVisitorOdo_LoadIndexed6MpnLLoadIndexed__v_;
@ -1403,16 +1363,12 @@ text: .text%__1cIBlockEndOsubstitute_sux6MpnKBlockBegin_2_v_;
text: .text%__1cCIRMcompute_code6M_v_; text: .text%__1cCIRMcompute_code6M_v_;
text: .text%__1cLInstructionGnegate6Fn0AJCondition__1_; text: .text%__1cLInstructionGnegate6Fn0AJCondition__1_;
text: .text%__1cJBlockListJblocks_do6MpFpnKBlockBegin__v_v_; text: .text%__1cJBlockListJblocks_do6MpFpnKBlockBegin__v_v_;
text: .text%__1cQUseCountComputerQupdate_use_count6FppnLInstruction__v_: c1_IR.o;
text: .text%__1cKStateSplitPstate_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cKValueStackJvalues_do6MpFppnLInstruction__v_v_;
text: .text%__1cFLocalIas_Local6M_p0_: c1_GraphBuilder.o; text: .text%__1cFLocalIas_Local6M_p0_: c1_GraphBuilder.o;
text: .text%__1cLCompilationIemit_lir6M_v_; text: .text%__1cLCompilationIemit_lir6M_v_;
text: .text%__1cLInstructionGas_Phi6M_pnDPhi__: c1_Canonicalizer.o; text: .text%__1cLInstructionGas_Phi6M_pnDPhi__: c1_Canonicalizer.o;
text: .text%__1cMas_BasicType6FpnJValueType__nJBasicType__; text: .text%__1cMas_BasicType6FpnJValueType__nJBasicType__;
text: .text%__1cJValueTypeRas_ObjectConstant6M_pnOObjectConstant__: c1_Canonicalizer.o; text: .text%__1cJValueTypeRas_ObjectConstant6M_pnOObjectConstant__: c1_Canonicalizer.o;
text: .text%__1cLLIR_OprFactKvalue_type6FpnJValueType__pnLLIR_OprDesc__; text: .text%__1cLLIR_OprFactKvalue_type6FpnJValueType__pnLLIR_OprDesc__;
text: .text%__1cKValueStackMcaller_state6kM_p0_;
text: .text%__1cJArrayTypeMas_ArrayType6M_p0_: c1_ValueType.o; text: .text%__1cJArrayTypeMas_ArrayType6M_p0_: c1_ValueType.o;
text: .text%__1cILIR_ListKshift_left6MpnLLIR_OprDesc_222_v_; text: .text%__1cILIR_ListKshift_left6MpnLLIR_OprDesc_222_v_;
text: .text%__1cJValueTypeLas_VoidType6M_pnIVoidType__: c1_Canonicalizer.o; text: .text%__1cJValueTypeLas_VoidType6M_pnIVoidType__: c1_Canonicalizer.o;
@ -1442,7 +1398,6 @@ text: .text%__1cNLIR_AssemblerNemit_opBranch6MpnMLIR_OpBranch__v_;
text: .text%__1cNLIR_AssemblerKemit_delay6MpnLLIR_OpDelay__v_; text: .text%__1cNLIR_AssemblerKemit_delay6MpnLLIR_OpDelay__v_;
text: .text%__1cNLIR_AssemblerLcode_offset6kM_i_; text: .text%__1cNLIR_AssemblerLcode_offset6kM_i_;
text: .text%__1cNLIR_AssemblerQemit_opTypeCheck6MpnPLIR_OpTypeCheck__v_; text: .text%__1cNLIR_AssemblerQemit_opTypeCheck6MpnPLIR_OpTypeCheck__v_;
text: .text%__1cIciObjectIencoding6M_pnI_jobject__;
text: .text%__1cOoop_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o; text: .text%__1cOoop_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cNLIR_AssemblerEload6MpnMRegisterImpl_i2nJBasicType_pnMCodeEmitInfo__i_; text: .text%__1cNLIR_AssemblerEload6MpnMRegisterImpl_i2nJBasicType_pnMCodeEmitInfo__i_;
text: .text%__1cNLIR_AssemblerIemit_op16MpnHLIR_Op1__v_; text: .text%__1cNLIR_AssemblerIemit_op16MpnHLIR_Op1__v_;
@ -1477,7 +1432,6 @@ text: .text%__1cYDebugInformationRecorderJdata_size6M_i_;
text: .text%__1cNRelocIteratorTadvance_over_prefix6M_v_; text: .text%__1cNRelocIteratorTadvance_over_prefix6M_v_;
text: .text%__1cOoop_RelocationLunpack_data6M_v_; text: .text%__1cOoop_RelocationLunpack_data6M_v_;
text: .text%__1cYDebugInformationRecorderHcopy_to6MpnHnmethod__v_; text: .text%__1cYDebugInformationRecorderHcopy_to6MpnHnmethod__v_;
text: .text%__1cLOopRecorderHcopy_to6MpnICodeBlob__v_;
text: .text%__1cIUniverseMnon_oop_word6F_pv_; text: .text%__1cIUniverseMnon_oop_word6F_pv_;
text: .text%__1cCosXnon_memory_address_word6F_pc_; text: .text%__1cCosXnon_memory_address_word6F_pc_;
text: .text%__1cHnmethodQcopy_scopes_data6MpCi_v_; text: .text%__1cHnmethodQcopy_scopes_data6MpCi_v_;
@ -1487,7 +1441,6 @@ text: .text%__1cODataRelocationJset_value6MpC_v_: relocInfo.o;
text: .text%__1cOoop_RelocationGoffset6M_i_: relocInfo.o; text: .text%__1cOoop_RelocationGoffset6M_i_: relocInfo.o;
text: .text%__1cKRelocationRpd_set_data_value6MpCi_v_; text: .text%__1cKRelocationRpd_set_data_value6MpCi_v_;
text: .text%__1cJCodeCacheGcommit6FpnICodeBlob__v_; text: .text%__1cJCodeCacheGcommit6FpnICodeBlob__v_;
text: .text%__1cFVTuneOcreate_nmethod6FpnHnmethod__v_;
text: .text%__1cWImplicitExceptionTableHcopy_to6MpnHnmethod__v_; text: .text%__1cWImplicitExceptionTableHcopy_to6MpnHnmethod__v_;
text: .text%__1cLCompilation2T6M_v_; text: .text%__1cLCompilation2T6M_v_;
text: .text%__1cFArena2T6M_v_; text: .text%__1cFArena2T6M_v_;
@ -1512,7 +1465,6 @@ text: .text%__1cTjava_lang_ThrowableTfill_in_stack_trace6FnGHandle_pnGThread__v_
text: .text%__1cVPreserveExceptionMark2T6M_v_; text: .text%__1cVPreserveExceptionMark2T6M_v_;
text: .text%__1cSInterpreterRuntimeXthrow_pending_exception6FpnKJavaThread__v_; text: .text%__1cSInterpreterRuntimeXthrow_pending_exception6FpnKJavaThread__v_;
text: .text%__1cSThreadLocalStorageGthread6F_pnGThread__: assembler_sparc.o; text: .text%__1cSThreadLocalStorageGthread6F_pnGThread__: assembler_sparc.o;
text: .text%__1cNSharedRuntimebKexception_handler_for_return_address6FpC_1_;
text: .text%__1cSInterpreterRuntimebFexception_handler_for_exception6FpnKJavaThread_pnHoopDesc__pC_; text: .text%__1cSInterpreterRuntimebFexception_handler_for_exception6FpnKJavaThread_pnHoopDesc__pC_;
text: .text%__1cNmethodOopDescbEfast_exception_handler_bci_for6MnLKlassHandle_ipnGThread__i_; text: .text%__1cNmethodOopDescbEfast_exception_handler_bci_for6MnLKlassHandle_ipnGThread__i_;
text: .text%__1cFframeZinterpreter_frame_set_bcp6MpC_v_; text: .text%__1cFframeZinterpreter_frame_set_bcp6MpC_v_;
@ -1530,8 +1482,6 @@ text: .text%__1cPciInstanceKlassYprotection_domain_handle6M_pnI_jobject__;
text: .text%__1cLNewInstanceFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cLNewInstanceFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerOdo_NewInstance6MpnLNewInstance__v_; text: .text%__1cNCanonicalizerOdo_NewInstance6MpnLNewInstance__v_;
text: .text%__1cMGraphBuilderGinvoke6MnJBytecodesECode__v_; text: .text%__1cMGraphBuilderGinvoke6MnJBytecodesECode__v_;
text: .text%__1cFciEnvTget_method_by_index6MpnPciInstanceKlass_inJBytecodesECode__pnIciMethod__;
text: .text%__1cFciEnvYget_method_by_index_impl6MpnPciInstanceKlass_inJBytecodesECode__pnIciMethod__;
text: .text%__1cPciObjectFactoryTget_unloaded_method6MpnPciInstanceKlass_pnIciSymbol_4_pnIciMethod__; text: .text%__1cPciObjectFactoryTget_unloaded_method6MpnPciInstanceKlass_pnIciSymbol_4_pnIciMethod__;
text: .text%__1cIciMethod2t6MpnPciInstanceKlass_pnIciSymbol_4_v_; text: .text%__1cIciMethod2t6MpnPciInstanceKlass_pnIciSymbol_4_v_;
text: .text%__1cNciMethodKlassEmake6F_p0_; text: .text%__1cNciMethodKlassEmake6F_p0_;
@ -1542,16 +1492,11 @@ text: .text%__1cKValueStackNpop_arguments6Mi_pnGValues__;
text: .text%__1cGInvokeFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cGInvokeFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerJdo_Invoke6MpnGInvoke__v_; text: .text%__1cNCanonicalizerJdo_Invoke6MpnGInvoke__v_;
text: .text%__1cGInvokeJas_Invoke6M_p0_: c1_Instruction.o; text: .text%__1cGInvokeJas_Invoke6M_p0_: c1_Instruction.o;
text: .text%__1cFThrowFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerIdo_Throw6MpnFThrow__v_; text: .text%__1cNCanonicalizerIdo_Throw6MpnFThrow__v_;
text: .text%__1cFThrowIas_Throw6M_p0_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorOdo_NewInstance6MpnLNewInstance__v_; text: .text%__1cQNullCheckVisitorOdo_NewInstance6MpnLNewInstance__v_;
text: .text%__1cGInvokePinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorJdo_Invoke6MpnGInvoke__v_; text: .text%__1cQNullCheckVisitorJdo_Invoke6MpnGInvoke__v_;
text: .text%__1cTNullCheckEliminatorNhandle_Invoke6MpnGInvoke__v_; text: .text%__1cTNullCheckEliminatorNhandle_Invoke6MpnGInvoke__v_;
text: .text%__1cFThrowPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorIdo_Throw6MpnFThrow__v_; text: .text%__1cQNullCheckVisitorIdo_Throw6MpnFThrow__v_;
text: .text%__1cFThrowPstate_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cIVoidTypeLas_VoidType6M_p0_: c1_ValueType.o; text: .text%__1cIVoidTypeLas_VoidType6M_p0_: c1_ValueType.o;
text: .text%__1cLNewInstanceKexact_type6kM_pnGciType__; text: .text%__1cLNewInstanceKexact_type6kM_pnGciType__;
text: .text%__1cLNewInstanceOas_NewInstance6M_p0_: c1_Instruction.o; text: .text%__1cLNewInstanceOas_NewInstance6M_p0_: c1_Instruction.o;
@ -1562,7 +1507,6 @@ text: .text%__1cPNewInstanceStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_sparc.o
text: .text%__1cNLIR_AssemblerJemit_call6MpnOLIR_OpJavaCall__v_; text: .text%__1cNLIR_AssemblerJemit_call6MpnOLIR_OpJavaCall__v_;
text: .text%__1cNLIR_AssemblerKalign_call6MnILIR_Code__v_; text: .text%__1cNLIR_AssemblerKalign_call6MnILIR_Code__v_;
text: .text%__1cICodeStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_sparc.o; text: .text%__1cICodeStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_sparc.o;
text: .text%__1cNLIR_AssemblerEcall6MpCnJrelocInfoJrelocType_pnMCodeEmitInfo__v_;
text: .text%__1cbBopt_virtual_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o; text: .text%__1cbBopt_virtual_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cYinternal_word_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o; text: .text%__1cYinternal_word_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cMPatchingStubJemit_code6MpnNLIR_Assembler__v_; text: .text%__1cMPatchingStubJemit_code6MpnNLIR_Assembler__v_;
@ -1586,8 +1530,6 @@ text: .text%__1cFciEnvNlookup_method6MpnNinstanceKlass_2pnNsymbolOopDesc_4nJByte
text: .text%__1cMLinkResolverbCresolve_virtual_call_or_null6FnLKlassHandle_1nMsymbolHandle_21_nMmethodHandle__; text: .text%__1cMLinkResolverbCresolve_virtual_call_or_null6FnLKlassHandle_1nMsymbolHandle_21_nMmethodHandle__;
text: .text%__1cPciObjectFactoryMvm_symbol_at6Fi_pnIciSymbol__; text: .text%__1cPciObjectFactoryMvm_symbol_at6Fi_pnIciSymbol__;
text: .text%__1cNCanonicalizerMdo_NullCheck6MpnJNullCheck__v_; text: .text%__1cNCanonicalizerMdo_NullCheck6MpnJNullCheck__v_;
text: .text%__1cKValueStackKpush_scope6MpnHIRScope__p0_;
text: .text%__1cHIRScopeXcompute_lock_stack_size6M_v_;
text: .text%__1cMGraphBuilderJScopeDataLnum_returns6M_i_; text: .text%__1cMGraphBuilderJScopeDataLnum_returns6M_i_;
text: .text%__1cNCanonicalizerHdo_Goto6MpnEGoto__v_; text: .text%__1cNCanonicalizerHdo_Goto6MpnEGoto__v_;
text: .text%__1cMGraphBuilderJScopeDataQincr_num_returns6M_v_; text: .text%__1cMGraphBuilderJScopeDataQincr_num_returns6M_v_;
@ -1596,10 +1538,7 @@ text: .text%__1cLCompilationVnotice_inlined_method6MpnIciMethod__v_;
text: .text%__1cFciEnvVnotice_inlined_method6MpnIciMethod__v_; text: .text%__1cFciEnvVnotice_inlined_method6MpnIciMethod__v_;
text: .text%__1cMLinkResolverbCresolve_special_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__; text: .text%__1cMLinkResolverbCresolve_special_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__;
text: .text%__1cLInstructionEprev6MpnKBlockBegin__p0_; text: .text%__1cLInstructionEprev6MpnKBlockBegin__p0_;
text: .text%__1cIConstantPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cIBlockEndPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cQNullCheckVisitorMdo_NullCheck6MpnJNullCheck__v_; text: .text%__1cQNullCheckVisitorMdo_NullCheck6MpnJNullCheck__v_;
text: .text%__1cHIRScopeNtop_scope_bci6kM_i_;
text: .text%__1cIFrameMapQmake_new_address6kMi_nHAddress__; text: .text%__1cIFrameMapQmake_new_address6kMi_nHAddress__;
text: .text%__1cNLIR_AssemblerFstore6MpnMRegisterImpl_2inJBasicType_pnMCodeEmitInfo__v_; text: .text%__1cNLIR_AssemblerFstore6MpnMRegisterImpl_2inJBasicType_pnMCodeEmitInfo__v_;
text: .text%__1cNLIR_AssemblerJstack2reg6MpnLLIR_OprDesc_2nJBasicType__v_; text: .text%__1cNLIR_AssemblerJstack2reg6MpnLLIR_OprDesc_2nJBasicType__v_;
@ -1610,7 +1549,6 @@ text: .text%__1cXNativeSignatureIteratorIdo_float6M_v_: interpreterRT_sparc.o;
text: .text%JVM_IsNaN; text: .text%JVM_IsNaN;
text: .text%__1cXNativeSignatureIteratorJdo_double6M_v_: interpreterRT_sparc.o; text: .text%__1cXNativeSignatureIteratorJdo_double6M_v_: interpreterRT_sparc.o;
text: .text%__1cSInterpreterRuntimeZSignatureHandlerGeneratorLpass_double6M_v_; text: .text%__1cSInterpreterRuntimeZSignatureHandlerGeneratorLpass_double6M_v_;
text: .text%__1cEIfOpPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cEIfOpFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cEIfOpFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cQNullCheckVisitorHdo_IfOp6MpnEIfOp__v_; text: .text%__1cQNullCheckVisitorHdo_IfOp6MpnEIfOp__v_;
text: .text%__1cOGenerateOopMapIcopy_cts6MpnNCellTypeState_2_i_; text: .text%__1cOGenerateOopMapIcopy_cts6MpnNCellTypeState_2_i_;
@ -1642,8 +1580,6 @@ text: .text%__1cHLogicOpEhash6kM_i_: c1_Instruction.o;
text: .text%__1cHLogicOpEname6kM_pkc_: c1_Instruction.o; text: .text%__1cHLogicOpEname6kM_pkc_: c1_Instruction.o;
text: .text%__1cMLinkResolverbBresolve_static_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__; text: .text%__1cMLinkResolverbBresolve_static_call_or_null6FnLKlassHandle_nMsymbolHandle_21_nMmethodHandle__;
text: .text%__1cQNullCheckVisitorNdo_StoreField6MpnKStoreField__v_; text: .text%__1cQNullCheckVisitorNdo_StoreField6MpnKStoreField__v_;
text: .text%__1cINewArrayPinput_values_do6MpFppnLInstruction__v_v_: c1_Instruction.o;
text: .text%__1cHConvertPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorKdo_Convert6MpnHConvert__v_; text: .text%__1cQNullCheckVisitorKdo_Convert6MpnHConvert__v_;
text: .text%__1cQNullCheckVisitorPdo_NewTypeArray6MpnMNewTypeArray__v_; text: .text%__1cQNullCheckVisitorPdo_NewTypeArray6MpnMNewTypeArray__v_;
text: .text%__1cJLoadFieldMas_LoadField6M_p0_: c1_Instruction.o; text: .text%__1cJLoadFieldMas_LoadField6M_p0_: c1_Instruction.o;
@ -1665,7 +1601,6 @@ text: .text%__1cRC1_MacroAssemblerPallocate_object6MpnMRegisterImpl_222ii2rnFLab
text: .text%__1cNLIR_AssemblerOmembar_release6M_v_; text: .text%__1cNLIR_AssemblerOmembar_release6M_v_;
text: .text%__1cNLIR_AssemblerGmembar6M_v_; text: .text%__1cNLIR_AssemblerGmembar6M_v_;
text: .text%__1cNLIR_AssemblerOmembar_acquire6M_v_; text: .text%__1cNLIR_AssemblerOmembar_acquire6M_v_;
text: .text%__1cNLIR_AssemblerHic_call6MpCpnMCodeEmitInfo__v_;
text: .text%__1cNLIR_AssemblerCpc6kM_pC_; text: .text%__1cNLIR_AssemblerCpc6kM_pC_;
text: .text%__1cXvirtual_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o; text: .text%__1cXvirtual_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cNLIR_AssemblerIlogic_op6MnILIR_Code_pnLLIR_OprDesc_33_v_; text: .text%__1cNLIR_AssemblerIlogic_op6MnILIR_Code_pnLLIR_OprDesc_33_v_;
@ -1675,7 +1610,6 @@ text: .text%__1cXvirtual_call_RelocationLunpack_data6M_v_;
text: .text%__1cNinstanceKlassVadd_dependent_nmethod6MpnHnmethod__v_; text: .text%__1cNinstanceKlassVadd_dependent_nmethod6MpnHnmethod__v_;
text: .text%__1cGPcDescHreal_pc6kMpknHnmethod__pC_; text: .text%__1cGPcDescHreal_pc6kMpknHnmethod__pC_;
text: .text%__1cNSharedRuntimeXfind_callee_info_helper6FpnKJavaThread_rnMvframeStream_rnJBytecodesECode_rnICallInfo_pnGThread__nGHandle__; text: .text%__1cNSharedRuntimeXfind_callee_info_helper6FpnKJavaThread_rnMvframeStream_rnJBytecodesECode_rnICallInfo_pnGThread__nGHandle__;
text: .text%__1cPBytecode_invokeFindex6kM_i_;
text: .text%__1cLRegisterMapIpd_clear6M_v_; text: .text%__1cLRegisterMapIpd_clear6M_v_;
text: .text%__1cPBytecode_invokeNstatic_target6MpnGThread__nMmethodHandle__; text: .text%__1cPBytecode_invokeNstatic_target6MpnGThread__nMmethodHandle__;
text: .text%__1cMLinkResolverOresolve_method6FrnMmethodHandle_rnLKlassHandle_nSconstantPoolHandle_ipnGThread__v_; text: .text%__1cMLinkResolverOresolve_method6FrnMmethodHandle_rnLKlassHandle_nSconstantPoolHandle_ipnGThread__v_;
@ -1690,8 +1624,6 @@ text: .text%__1cKNativeCallXset_destination_mt_safe6MpC_v_;
text: .text%__1cNmethodOopDescTverified_code_entry6M_pC_; text: .text%__1cNmethodOopDescTverified_code_entry6M_pC_;
text: .text%__1cOGenerateOopMapGdo_ldc6Mii_v_; text: .text%__1cOGenerateOopMapGdo_ldc6Mii_v_;
text: .text%__1cMGraphBuilderNload_constant6M_v_; text: .text%__1cMGraphBuilderNload_constant6M_v_;
text: .text%__1cFciEnvVget_constant_by_index6MpnPciInstanceKlass_i_nKciConstant__;
text: .text%__1cFciEnvbAget_constant_by_index_impl6MpnPciInstanceKlass_i_nKciConstant__;
text: .text%__1cWstatic_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o; text: .text%__1cWstatic_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInfo.o;
text: .text%__1cKExceptionsL_throw_args6FpnGThread_pkcinMsymbolHandle_5pnRJavaCallArguments__v_; text: .text%__1cKExceptionsL_throw_args6FpnGThread_pkcinMsymbolHandle_5pnRJavaCallArguments__v_;
text: .text%__1cMPeriodicTaskOreal_time_tick6FI_v_; text: .text%__1cMPeriodicTaskOreal_time_tick6FI_v_;
@ -1700,7 +1632,6 @@ text: .text%jni_GetArrayLength: jni.o;
text: .text%JVM_Read; text: .text%JVM_Read;
text: .text%jni_GetByteArrayRegion: jni.o; text: .text%jni_GetByteArrayRegion: jni.o;
text: .text%JVM_DefineClassWithSource; text: .text%JVM_DefineClassWithSource;
text: .text%__1cQSystemDictionaryTresolve_from_stream6FnMsymbolHandle_nGHandle_2pnPClassFileStream_pnGThread__pnMklassOopDesc__;
text: .text%__1cPClassFileParserbDverify_legal_method_signature6MnMsymbolHandle_1pnGThread__i_; text: .text%__1cPClassFileParserbDverify_legal_method_signature6MnMsymbolHandle_1pnGThread__i_;
text: .text%__1cPClassFileParserXverify_legal_class_name6MnMsymbolHandle_pnGThread__v_; text: .text%__1cPClassFileParserXverify_legal_class_name6MnMsymbolHandle_pnGThread__v_;
text: .text%__1cQSystemDictionarybAvalidate_protection_domain6FnTinstanceKlassHandle_nGHandle_2pnGThread__v_; text: .text%__1cQSystemDictionarybAvalidate_protection_domain6FnTinstanceKlassHandle_nGHandle_2pnGThread__v_;
@ -1780,7 +1711,6 @@ text: .text%__1cUJvmtiEventControllerIvm_death6F_v_;
text: .text%__1cCosXterminate_signal_thread6F_v_; text: .text%__1cCosXterminate_signal_thread6F_v_;
text: .text%__1cCosNsigexitnum_pd6F_i_; text: .text%__1cCosNsigexitnum_pd6F_i_;
text: .text%__1cCosNsignal_notify6Fi_v_; text: .text%__1cCosNsignal_notify6Fi_v_;
text: .text%__1cFVTuneEexit6F_v_;
text: .text%__1cIVMThreadXwait_for_vm_thread_exit6F_v_; text: .text%__1cIVMThreadXwait_for_vm_thread_exit6F_v_;
text: .text%__1cUSafepointSynchronizeFbegin6F_v_; text: .text%__1cUSafepointSynchronizeFbegin6F_v_;
text: .text%__1cORuntimeServiceWrecord_safepoint_begin6F_v_; text: .text%__1cORuntimeServiceWrecord_safepoint_begin6F_v_;
@ -1795,7 +1725,6 @@ text: .text%__1cQSystemDictionaryRnumber_of_classes6F_i_;
text: .text%__1cQSystemDictionaryStry_get_next_class6F_pnMklassOopDesc__; text: .text%__1cQSystemDictionaryStry_get_next_class6F_pnMklassOopDesc__;
text: .text%__1cKDictionaryStry_get_next_class6M_pnMklassOopDesc__; text: .text%__1cKDictionaryStry_get_next_class6M_pnMklassOopDesc__;
text: .text%__1cNinstanceKlassKmethods_do6MpFpnNmethodOopDesc__v_v_; text: .text%__1cNinstanceKlassKmethods_do6MpFpnNmethodOopDesc__v_v_;
text: .text%__1cONMethodSweeperFsweep6F_v_;
text: .text%__1cNCompileBrokerQset_should_block6F_v_; text: .text%__1cNCompileBrokerQset_should_block6F_v_;
text: .text%__1cHVM_ExitbJwait_for_threads_in_native_to_block6F_i_; text: .text%__1cHVM_ExitbJwait_for_threads_in_native_to_block6F_i_;
text: .text%__1cIVMThreadHdestroy6F_v_; text: .text%__1cIVMThreadHdestroy6F_v_;
@ -1839,7 +1768,6 @@ text: .text%__1cMGraphBuilderNstore_indexed6MnJBasicType__v_;
text: .text%__1cIValueMapKkill_array6MpnJValueType__v_; text: .text%__1cIValueMapKkill_array6MpnJValueType__v_;
text: .text%__1cNCanonicalizerPdo_StoreIndexed6MpnMStoreIndexed__v_; text: .text%__1cNCanonicalizerPdo_StoreIndexed6MpnMStoreIndexed__v_;
text: .text%__1cQNullCheckVisitorPdo_StoreIndexed6MpnMStoreIndexed__v_; text: .text%__1cQNullCheckVisitorPdo_StoreIndexed6MpnMStoreIndexed__v_;
text: .text%__1cKValueStackElock6MpnHIRScope_pnLInstruction__i_;
text: .text%__1cKValueStackGunlock6M_i_; text: .text%__1cKValueStackGunlock6M_i_;
text: .text%__1cQMonitorEnterStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_sparc.o; text: .text%__1cQMonitorEnterStubFvisit6MpnQLIR_OpVisitState__v_: c1_CodeStubs_sparc.o;
text: .text%__1cNLIR_AssemblerJemit_lock6MpnKLIR_OpLock__v_; text: .text%__1cNLIR_AssemblerJemit_lock6MpnKLIR_OpLock__v_;
@ -1859,7 +1787,6 @@ text: .text%__1cTconstantPoolOopDescbCverify_constant_pool_resolve6FnSconstantPo
text: .text%__1cNCanonicalizerMdo_Intrinsic6MpnJIntrinsic__v_; text: .text%__1cNCanonicalizerMdo_Intrinsic6MpnJIntrinsic__v_;
text: .text%__1cMas_ValueType6FnKciConstant__pnJValueType__; text: .text%__1cMas_ValueType6FnKciConstant__pnJValueType__;
text: .text%__1cQNullCheckVisitorMdo_Intrinsic6MpnJIntrinsic__v_; text: .text%__1cQNullCheckVisitorMdo_Intrinsic6MpnJIntrinsic__v_;
text: .text%__1cDOp2Pinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorKdo_ShiftOp6MpnHShiftOp__v_; text: .text%__1cQNullCheckVisitorKdo_ShiftOp6MpnHShiftOp__v_;
text: .text%__1cILIR_ListLshift_right6MpnLLIR_OprDesc_222_v_; text: .text%__1cILIR_ListLshift_right6MpnLLIR_OprDesc_222_v_;
text: .text%__1cMNewTypeArrayKexact_type6kM_pnGciType__; text: .text%__1cMNewTypeArrayKexact_type6kM_pnGciType__;
@ -1876,7 +1803,6 @@ text: .text%__1cYciExceptionHandlerStreamEnext6M_v_: c1_IR.o;
text: .text%__1cLInstructionGmirror6Fn0AJCondition__1_; text: .text%__1cLInstructionGmirror6Fn0AJCondition__1_;
text: .text%__1cKBlockBeginVadd_exception_handler6Mp0_v_; text: .text%__1cKBlockBeginVadd_exception_handler6Mp0_v_;
text: .text%__1cIciObjectEhash6M_i_; text: .text%__1cIciObjectEhash6M_i_;
text: .text%__1cLAccessFieldPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cFChunk2n6FII_pv_; text: .text%__1cFChunk2n6FII_pv_;
text: .text%jni_CallStaticVoidMethodV: jni.o; text: .text%jni_CallStaticVoidMethodV: jni.o;
text: .text%JVM_GetLastErrorString; text: .text%JVM_GetLastErrorString;
@ -1899,8 +1825,6 @@ text: .text%__1cWCountInterfacesClosureEdoit6MpnMklassOopDesc_i_v_: klassVtable.
text: .text%__1cPfieldDescriptorRint_initial_value6kM_i_; text: .text%__1cPfieldDescriptorRint_initial_value6kM_i_;
text: .text%__1cSSetupItableClosureEdoit6MpnMklassOopDesc_i_v_: klassVtable.o; text: .text%__1cSSetupItableClosureEdoit6MpnMklassOopDesc_i_v_: klassVtable.o;
text: .text%__1cSInterpreterRuntimeOmultianewarray6FpnKJavaThread_pi_v_; text: .text%__1cSInterpreterRuntimeOmultianewarray6FpnKJavaThread_pi_v_;
text: .text%__1cNinstanceKlassSlookup_osr_nmethod6kMkpnNmethodOopDesc_i_pnHnmethod__;
text: .text%__1cQSimpleCompPolicyYmethod_back_branch_event6MnMmethodHandle_iipnGThread__v_;
text: .text%__1cMGraphBuilderQnew_object_array6M_v_; text: .text%__1cMGraphBuilderQnew_object_array6M_v_;
text: .text%__1cONewObjectArrayFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o; text: .text%__1cONewObjectArrayFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerRdo_NewObjectArray6MpnONewObjectArray__v_; text: .text%__1cNCanonicalizerRdo_NewObjectArray6MpnONewObjectArray__v_;
@ -1919,7 +1843,6 @@ text: .text%__1cUBytecode_tableswitchOdest_offset_at6kMi_i_;
text: .text%__1cMGraphBuilderMtable_switch6M_v_; text: .text%__1cMGraphBuilderMtable_switch6M_v_;
text: .text%__1cLTableSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cLTableSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerOdo_TableSwitch6MpnLTableSwitch__v_; text: .text%__1cNCanonicalizerOdo_TableSwitch6MpnLTableSwitch__v_;
text: .text%__1cGSwitchPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorOdo_TableSwitch6MpnLTableSwitch__v_; text: .text%__1cQNullCheckVisitorOdo_TableSwitch6MpnLTableSwitch__v_;
text: .text%__1cSCompiledStaticCallNcompute_entry6FnMmethodHandle_rnOStaticCallInfo__v_; text: .text%__1cSCompiledStaticCallNcompute_entry6FnMmethodHandle_rnOStaticCallInfo__v_;
text: .text%__1cSCompiledStaticCallDset6MrknOStaticCallInfo__v_; text: .text%__1cSCompiledStaticCallDset6MrknOStaticCallInfo__v_;
@ -1972,7 +1895,6 @@ text: .text%jni_GetStaticObjectField: jni.o;
text: .text%__1cXJNI_ArgumentPusherVaArgIget_long6M_v_: jni.o; text: .text%__1cXJNI_ArgumentPusherVaArgIget_long6M_v_: jni.o;
text: .text%__1cINegateOpFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cINegateOpFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerLdo_NegateOp6MpnINegateOp__v_; text: .text%__1cNCanonicalizerLdo_NegateOp6MpnINegateOp__v_;
text: .text%__1cINegateOpPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorLdo_NegateOp6MpnINegateOp__v_; text: .text%__1cQNullCheckVisitorLdo_NegateOp6MpnINegateOp__v_;
text: .text%__1cILIR_ListLstore_check6MpnLLIR_OprDesc_2222pnMCodeEmitInfo__v_; text: .text%__1cILIR_ListLstore_check6MpnLLIR_OprDesc_2222pnMCodeEmitInfo__v_;
text: .text%__1cXArrayStoreExceptionStub2t6MpnMCodeEmitInfo__v_; text: .text%__1cXArrayStoreExceptionStub2t6MpnMCodeEmitInfo__v_;
@ -2008,13 +1930,11 @@ text: .text%__1cYjava_lang_reflect_MethodEslot6FpnHoopDesc__i_;
text: .text%__1cYjava_lang_reflect_MethodPparameter_types6FpnHoopDesc__2_; text: .text%__1cYjava_lang_reflect_MethodPparameter_types6FpnHoopDesc__2_;
text: .text%__1cYjava_lang_reflect_MethodLreturn_type6FpnHoopDesc__2_; text: .text%__1cYjava_lang_reflect_MethodLreturn_type6FpnHoopDesc__2_;
text: .text%JVM_IsInterrupted; text: .text%JVM_IsInterrupted;
text: .text%__1cMArithmeticOpKlock_stack6kM_pnKValueStack__: c1_Instruction.o;
text: .text%__1cNLIR_AssemblerIemit_op36MpnHLIR_Op3__v_; text: .text%__1cNLIR_AssemblerIemit_op36MpnHLIR_Op3__v_;
text: .text%__1cNLIR_AssemblerbCadd_debug_info_for_div0_here6MpnMCodeEmitInfo__v_; text: .text%__1cNLIR_AssemblerbCadd_debug_info_for_div0_here6MpnMCodeEmitInfo__v_;
text: .text%__1cNDivByZeroStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_sparc.o; text: .text%__1cNDivByZeroStubEinfo6kM_pnMCodeEmitInfo__: c1_CodeStubs_sparc.o;
text: .text%__1cNDivByZeroStubJemit_code6MpnNLIR_Assembler__v_; text: .text%__1cNDivByZeroStubJemit_code6MpnNLIR_Assembler__v_;
text: .text%__1cIFrameMapLnr2floatreg6Fi_pnRFloatRegisterImpl__; text: .text%__1cIFrameMapLnr2floatreg6Fi_pnRFloatRegisterImpl__;
text: .text%__1cRCompilationPolicybIreset_counter_for_invocation_event6MnMmethodHandle__v_;
text: .text%Unsafe_EnsureClassInitialized; text: .text%Unsafe_EnsureClassInitialized;
text: .text%__1cPClassFileParserYparse_checked_exceptions6MpHInSconstantPoolHandle_pnGThread__1_; text: .text%__1cPClassFileParserYparse_checked_exceptions6MpHInSconstantPoolHandle_pnGThread__1_;
text: .text%__1cPClassFileStreamHskip_u26MipnGThread__v_; text: .text%__1cPClassFileStreamHskip_u26MipnGThread__v_;
@ -2070,7 +1990,6 @@ text: .text%jni_NewIntArray: jni.o;
text: .text%__1cKGenerationInext_gen6kM_p0_; text: .text%__1cKGenerationInext_gen6kM_p0_;
text: .text%__1cQinstanceRefKlassZacquire_pending_list_lock6FpnJBasicLock__v_; text: .text%__1cQinstanceRefKlassZacquire_pending_list_lock6FpnJBasicLock__v_;
text: .text%__1cbAVM_GenCollectForAllocationEdoit6M_v_; text: .text%__1cbAVM_GenCollectForAllocationEdoit6M_v_;
text: .text%__1cPGCMemoryManagerIgc_begin6M_v_;
text: .text%__1cKManagementJtimestamp6F_x_; text: .text%__1cKManagementJtimestamp6F_x_;
text: .text%__1cTContiguousSpacePoolQget_memory_usage6M_nLMemoryUsage__; text: .text%__1cTContiguousSpacePoolQget_memory_usage6M_nLMemoryUsage__;
text: .text%__1cTContiguousSpacePoolNused_in_bytes6M_I_: memoryPool.o; text: .text%__1cTContiguousSpacePoolNused_in_bytes6M_I_: memoryPool.o;
@ -2094,8 +2013,6 @@ text: .text%__1cKSharedHeapbAchange_strong_roots_parity6M_v_;
text: .text%__1cPFastScanClosureGdo_oop6MppnHoopDesc__v_: defNewGeneration.o; text: .text%__1cPFastScanClosureGdo_oop6MppnHoopDesc__v_: defNewGeneration.o;
text: .text%__1cPContiguousSpaceIallocate6MI_pnIHeapWord__; text: .text%__1cPContiguousSpaceIallocate6MI_pnIHeapWord__;
text: .text%__1cKJNIHandlesHoops_do6FpnKOopClosure__v_; text: .text%__1cKJNIHandlesHoops_do6FpnKOopClosure__v_;
text: .text%__1cHThreadsHoops_do6FpnKOopClosure__v_;
text: .text%__1cKJavaThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cOJNIHandleBlockHoops_do6MpnKOopClosure__v_; text: .text%__1cOJNIHandleBlockHoops_do6MpnKOopClosure__v_;
text: .text%__1cKHandleAreaHoops_do6MpnKOopClosure__v_; text: .text%__1cKHandleAreaHoops_do6MpnKOopClosure__v_;
text: .text%__1cFframeVinterpreter_frame_bci6kM_i_; text: .text%__1cFframeVinterpreter_frame_bci6kM_i_;
@ -2124,8 +2041,6 @@ text: .text%__1cTMaskFillerForNative2t6MnMmethodHandle_pIi_v_: oopMapCache.o;
text: .text%__1cTMaskFillerForNativeLpass_object6M_v_: oopMapCache.o; text: .text%__1cTMaskFillerForNativeLpass_object6M_v_: oopMapCache.o;
text: .text%__1cTMaskFillerForNativeJpass_long6M_v_: oopMapCache.o; text: .text%__1cTMaskFillerForNativeJpass_long6M_v_: oopMapCache.o;
text: .text%__1cRComputeEntryStackHdo_long6M_v_: generateOopMap.o; text: .text%__1cRComputeEntryStackHdo_long6M_v_: generateOopMap.o;
text: .text%__1cIVMThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cGThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cSObjectSynchronizerHoops_do6FpnKOopClosure__v_; text: .text%__1cSObjectSynchronizerHoops_do6FpnKOopClosure__v_;
text: .text%__1cMFlatProfilerHoops_do6FpnKOopClosure__v_; text: .text%__1cMFlatProfilerHoops_do6FpnKOopClosure__v_;
text: .text%__1cKManagementHoops_do6FpnKOopClosure__v_; text: .text%__1cKManagementHoops_do6FpnKOopClosure__v_;
@ -2221,7 +2136,6 @@ text: .text%__1cUGenGCEpilogueClosureNdo_generation6MpnKGeneration__v_: genColle
text: .text%__1cRTenuredGenerationPupdate_counters6M_v_; text: .text%__1cRTenuredGenerationPupdate_counters6M_v_;
text: .text%__1cUCompactingPermGenGenPupdate_counters6M_v_; text: .text%__1cUCompactingPermGenGenPupdate_counters6M_v_;
text: .text%__1cXTraceMemoryManagerStats2T6M_v_; text: .text%__1cXTraceMemoryManagerStats2T6M_v_;
text: .text%__1cPGCMemoryManagerGgc_end6M_v_;
text: .text%__1cRLowMemoryDetectorWdetect_after_gc_memory6FpnKMemoryPool__v_; text: .text%__1cRLowMemoryDetectorWdetect_after_gc_memory6FpnKMemoryPool__v_;
text: .text%__1cNJvmtiGCMarker2T6M_v_; text: .text%__1cNJvmtiGCMarker2T6M_v_;
text: .text%__1cPVM_GC_OperationNdoit_epilogue6M_v_; text: .text%__1cPVM_GC_OperationNdoit_epilogue6M_v_;
@ -2232,7 +2146,6 @@ text: .text%__1cLklassVtableQindex_of_miranda6MpnNsymbolOopDesc_2_i_;
text: .text%__1cNCanonicalizerPdo_MonitorEnter6MpnMMonitorEnter__v_; text: .text%__1cNCanonicalizerPdo_MonitorEnter6MpnMMonitorEnter__v_;
text: .text%__1cLMonitorExitFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cLMonitorExitFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerOdo_MonitorExit6MpnLMonitorExit__v_; text: .text%__1cNCanonicalizerOdo_MonitorExit6MpnLMonitorExit__v_;
text: .text%__1cNAccessMonitorPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorPdo_MonitorEnter6MpnMMonitorEnter__v_; text: .text%__1cQNullCheckVisitorPdo_MonitorEnter6MpnMMonitorEnter__v_;
text: .text%__1cQNullCheckVisitorOdo_MonitorExit6MpnLMonitorExit__v_; text: .text%__1cQNullCheckVisitorOdo_MonitorExit6MpnLMonitorExit__v_;
text: .text%__1cMLongConstantPas_LongConstant6M_p0_: c1_Canonicalizer.o; text: .text%__1cMLongConstantPas_LongConstant6M_p0_: c1_Canonicalizer.o;
@ -2246,10 +2159,7 @@ text: .text%__1cRComputeEntryStackIdo_short6M_v_: generateOopMap.o;
text: .text%Unsafe_AllocateInstance; text: .text%Unsafe_AllocateInstance;
text: .text%jni_AllocObject: jni.o; text: .text%jni_AllocObject: jni.o;
text: .text%__1cNCanonicalizerMset_constant6Mi_v_: c1_Canonicalizer.o; text: .text%__1cNCanonicalizerMset_constant6Mi_v_: c1_Canonicalizer.o;
text: .text%__1cJTypeCheckPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cIBytecodeIset_code6MnJBytecodesECode__v_;
text: .text%__1cQComputeCallStackIdo_float6M_v_: generateOopMap.o; text: .text%__1cQComputeCallStackIdo_float6M_v_: generateOopMap.o;
text: .text%__1cFframeRoops_code_blob_do6MpnKOopClosure_pknLRegisterMap__v_;
text: .text%__1cMOopMapStreamJfind_next6M_v_; text: .text%__1cMOopMapStreamJfind_next6M_v_;
text: .text%__1cQinstanceRefKlassSoop_oop_iterate_nv6MpnHoopDesc_pnQFilteringClosure__i_; text: .text%__1cQinstanceRefKlassSoop_oop_iterate_nv6MpnHoopDesc_pnQFilteringClosure__i_;
text: .text%__1cQinstanceRefKlassUoop_oop_iterate_nv_m6MpnHoopDesc_pnQFilteringClosure_nJMemRegion__i_; text: .text%__1cQinstanceRefKlassUoop_oop_iterate_nv_m6MpnHoopDesc_pnQFilteringClosure_nJMemRegion__i_;
@ -2284,7 +2194,6 @@ text: .text%__1cLsymbolKlassToop_follow_contents6MpnHoopDesc__v_;
text: .text%__1cQinstanceRefKlassToop_follow_contents6MpnHoopDesc__v_; text: .text%__1cQinstanceRefKlassToop_follow_contents6MpnHoopDesc__v_;
text: .text%__1cQconstMethodKlassToop_follow_contents6MpnHoopDesc__v_; text: .text%__1cQconstMethodKlassToop_follow_contents6MpnHoopDesc__v_;
text: .text%__1cFJNIidHoops_do6MpnKOopClosure__v_; text: .text%__1cFJNIidHoops_do6MpnKOopClosure__v_;
text: .text%__1cHnmethodHoops_do6MpnKOopClosure__v_;
text: .text%__1cQSystemDictionaryValways_strong_oops_do6FpnKOopClosure__v_; text: .text%__1cQSystemDictionaryValways_strong_oops_do6FpnKOopClosure__v_;
text: .text%__1cKDictionaryYalways_strong_classes_do6MpnKOopClosure__v_; text: .text%__1cKDictionaryYalways_strong_classes_do6MpnKOopClosure__v_;
text: .text%__1cVLoaderConstraintTableYalways_strong_classes_do6MpnKOopClosure__v_; text: .text%__1cVLoaderConstraintTableYalways_strong_classes_do6MpnKOopClosure__v_;
@ -2341,9 +2250,6 @@ text: .text%__1cVcompiledICHolderKlassOklass_oop_size6kM_i_: compiledICHolderKla
text: .text%__1cJHashtableHoops_do6MpnKOopClosure__v_; text: .text%__1cJHashtableHoops_do6MpnKOopClosure__v_;
text: .text%__1cQSystemDictionaryYalways_strong_classes_do6FpnKOopClosure__v_; text: .text%__1cQSystemDictionaryYalways_strong_classes_do6FpnKOopClosure__v_;
text: .text%__1cQSystemDictionaryPplaceholders_do6FpnKOopClosure__v_; text: .text%__1cQSystemDictionaryPplaceholders_do6FpnKOopClosure__v_;
text: .text%__1cSDeoptimizationBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cLRuntimeStubHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cNSafepointBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cJMarkSweepMadjust_marks6F_v_; text: .text%__1cJMarkSweepMadjust_marks6F_v_;
text: .text%__1cYGenAdjustPointersClosureNdo_generation6MpnKGeneration__v_: genMarkSweep.o; text: .text%__1cYGenAdjustPointersClosureNdo_generation6MpnKGeneration__v_: genMarkSweep.o;
text: .text%__1cKGenerationPadjust_pointers6M_v_; text: .text%__1cKGenerationPadjust_pointers6M_v_;
@ -2379,32 +2285,24 @@ text: .text%__1cIRuntime1Onew_type_array6FpnKJavaThread_pnMklassOopDesc_i_v_;
text: .text%__1cVcompiledICHolderKlassRoop_oop_iterate_m6MpnHoopDesc_pnKOopClosure_nJMemRegion__i_; text: .text%__1cVcompiledICHolderKlassRoop_oop_iterate_m6MpnHoopDesc_pnKOopClosure_nJMemRegion__i_;
text: .text%__1cNObjectMonitorGnotify6MpnGThread__v_; text: .text%__1cNObjectMonitorGnotify6MpnGThread__v_;
text: .text%__1cOMacroAssemblerEmult6MpnMRegisterImpl_22_v_; text: .text%__1cOMacroAssemblerEmult6MpnMRegisterImpl_22_v_;
text: .text%__1cINewArrayPother_values_do6MpFppnLInstruction__v_v_;
text: .text%__1cJValueTypeLas_LongType6M_pnILongType__: c1_Canonicalizer.o; text: .text%__1cJValueTypeLas_LongType6M_pnILongType__: c1_Canonicalizer.o;
text: .text%__1cNVM_DeoptimizeEdoit6M_v_; text: .text%__1cNVM_DeoptimizeEdoit6M_v_;
text: .text%__1cODeoptimizationVdeoptimize_dependents6F_i_; text: .text%__1cODeoptimizationVdeoptimize_dependents6F_i_;
text: .text%__1cHThreadsbFdeoptimized_wrt_marked_nmethods6F_v_; text: .text%__1cHThreadsbFdeoptimized_wrt_marked_nmethods6F_v_;
text: .text%__1cKJavaThreadbFdeoptimized_wrt_marked_nmethods6M_v_; text: .text%__1cKJavaThreadbFdeoptimized_wrt_marked_nmethods6M_v_;
text: .text%__1cJCodeCachebGmake_marked_nmethods_not_entrant6F_v_; text: .text%__1cJCodeCachebGmake_marked_nmethods_not_entrant6F_v_;
text: .text%__1cHnmethodbAmake_not_entrant_or_zombie6Mi_v_;
text: .text%__1cKNativeJumpUpatch_verified_entry6FpC11_v_; text: .text%__1cKNativeJumpUpatch_verified_entry6FpC11_v_;
text: .text%signalHandler; text: .text%signalHandler;
text: .text%JVM_handle_solaris_signal; text: .text%JVM_handle_solaris_signal;
text: .text%JVM_HoldsLock; text: .text%JVM_HoldsLock;
text: .text%__1cQinstanceRefKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceRefKlass.o; text: .text%__1cQinstanceRefKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceRefKlass.o;
text: .text%__1cJCodeCacheFfirst6F_pnICodeBlob__; text: .text%__1cJCodeCacheFfirst6F_pnICodeBlob__;
text: .text%__1cHThreadsLnmethods_do6F_v_;
text: .text%__1cFframeLnmethods_do6M_v_;
text: .text%__1cJCodeCacheEnext6FpnICodeBlob__2_; text: .text%__1cJCodeCacheEnext6FpnICodeBlob__2_;
text: .text%__1cONewObjectArrayKexact_type6kM_pnGciType__; text: .text%__1cONewObjectArrayKexact_type6kM_pnGciType__;
text: .text%__1cPciObjArrayKlassNelement_klass6M_pnHciKlass__; text: .text%__1cPciObjArrayKlassNelement_klass6M_pnHciKlass__;
text: .text%__1cIRuntime1Noop_arraycopy6FpnIHeapWord_2i_v_; text: .text%__1cIRuntime1Noop_arraycopy6FpnIHeapWord_2i_v_;
text: .text%__1cMGraphBuilderKcompare_op6MpnJValueType_nJBytecodesECode__v_; text: .text%__1cMGraphBuilderKcompare_op6MpnJValueType_nJBytecodesECode__v_;
text: .text%__1cJCompareOpFvisit6MpnSInstructionVisitor__v_: c1_Instruction.o;
text: .text%__1cNCanonicalizerMdo_CompareOp6MpnJCompareOp__v_; text: .text%__1cNCanonicalizerMdo_CompareOp6MpnJCompareOp__v_;
text: .text%__1cJCompareOpEhash6kM_i_: c1_Instruction.o;
text: .text%__1cJCompareOpEname6kM_pkc_: c1_Instruction.o;
text: .text%__1cJCompareOpMas_CompareOp6M_p0_: c1_Instruction.o;
text: .text%__1cJValueTypeOas_IntConstant6M_pnLIntConstant__: c1_Canonicalizer.o; text: .text%__1cJValueTypeOas_IntConstant6M_pnLIntConstant__: c1_Canonicalizer.o;
text: .text%__1cHIntTypeKas_IntType6M_p0_: c1_Canonicalizer.o; text: .text%__1cHIntTypeKas_IntType6M_p0_: c1_Canonicalizer.o;
text: .text%__1cNSharedRuntimeDf2l6Ff_x_; text: .text%__1cNSharedRuntimeDf2l6Ff_x_;
@ -2422,7 +2320,6 @@ text: .text%__1cRComputeEntryStackIdo_float6M_v_: generateOopMap.o;
text: .text%__1cTMaskFillerForNativeIpass_int6M_v_: oopMapCache.o; text: .text%__1cTMaskFillerForNativeIpass_int6M_v_: oopMapCache.o;
text: .text%__1cHnmethodSflush_dependencies6MpnRBoolObjectClosure__v_; text: .text%__1cHnmethodSflush_dependencies6MpnRBoolObjectClosure__v_;
text: .text%__1cNinstanceKlassYremove_dependent_nmethod6MpnHnmethod__v_; text: .text%__1cNinstanceKlassYremove_dependent_nmethod6MpnHnmethod__v_;
text: .text%__1cFVTuneOdelete_nmethod6FpnHnmethod__v_;
text: .text%__1cLCardTableRSFclear6MnJMemRegion__v_: cardTableRS.o; text: .text%__1cLCardTableRSFclear6MnJMemRegion__v_: cardTableRS.o;
text: .text%__1cRCardTableModRefBSFclear6MnJMemRegion__v_; text: .text%__1cRCardTableModRefBSFclear6MnJMemRegion__v_;
text: .text%__1cHnmethodFflush6M_v_; text: .text%__1cHnmethodFflush6M_v_;
@ -2445,9 +2342,7 @@ text: .text%__1cNCanonicalizerPdo_UnsafeGetRaw6MpnMUnsafeGetRaw__v_;
text: .text%__1cMGraphBuilderNlookup_switch6M_v_; text: .text%__1cMGraphBuilderNlookup_switch6M_v_;
text: .text%__1cMLookupSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o; text: .text%__1cMLookupSwitchFvisit6MpnSInstructionVisitor__v_: c1_GraphBuilder.o;
text: .text%__1cNCanonicalizerPdo_LookupSwitch6MpnMLookupSwitch__v_; text: .text%__1cNCanonicalizerPdo_LookupSwitch6MpnMLookupSwitch__v_;
text: .text%__1cMUnsafePutRawPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorPdo_UnsafePutRaw6MpnMUnsafePutRaw__v_; text: .text%__1cQNullCheckVisitorPdo_UnsafePutRaw6MpnMUnsafePutRaw__v_;
text: .text%__1cLUnsafeRawOpPinput_values_do6MpFppnLInstruction__v_v_: c1_GraphBuilder.o;
text: .text%__1cQNullCheckVisitorPdo_UnsafeGetRaw6MpnMUnsafeGetRaw__v_; text: .text%__1cQNullCheckVisitorPdo_UnsafeGetRaw6MpnMUnsafeGetRaw__v_;
text: .text%__1cQNullCheckVisitorPdo_LookupSwitch6MpnMLookupSwitch__v_; text: .text%__1cQNullCheckVisitorPdo_LookupSwitch6MpnMLookupSwitch__v_;
text: .text%__1cIRuntime1Mnew_instance6FpnKJavaThread_pnMklassOopDesc__v_; text: .text%__1cIRuntime1Mnew_instance6FpnKJavaThread_pnMklassOopDesc__v_;

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -4,7 +4,6 @@ text = LOAD ?RXO;
text: .text%__1cCosOjavaTimeMillis6F_x_; text: .text%__1cCosOjavaTimeMillis6F_x_;
text: .text%__1cQIndexSetIteratorQadvance_and_next6M_I_; text: .text%__1cQIndexSetIteratorQadvance_and_next6M_I_;
text: .text%__1cNinstanceKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cNinstanceKlassToop_adjust_pointers6MpnHoopDesc__i_; text: .text%__1cNinstanceKlassToop_adjust_pointers6MpnHoopDesc__i_;
text: .text%__1cNinstanceKlassToop_follow_contents6MpnHoopDesc__v_; text: .text%__1cNinstanceKlassToop_follow_contents6MpnHoopDesc__v_;
text: .text%__1cOtypeArrayKlassToop_adjust_pointers6MpnHoopDesc__i_; text: .text%__1cOtypeArrayKlassToop_adjust_pointers6MpnHoopDesc__i_;
@ -33,7 +32,6 @@ text: .text%__1cETypeFuhash6Fkpk0_i_;
text: .text%__1cQIndexSetIteratorEnext6M_I_: chaitin.o; text: .text%__1cQIndexSetIteratorEnext6M_I_: chaitin.o;
text: .text%__1cENodeIout_grow6MI_v_; text: .text%__1cENodeIout_grow6MI_v_;
text: .text%__1cOloadConI13NodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cOloadConI13NodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cNobjArrayKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cENodeHadd_req6Mp0_v_; text: .text%__1cENodeHadd_req6Mp0_v_;
text: .text%__1cJMarkSweepUAdjustPointerClosureGdo_oop6MppnHoopDesc__v_: markSweep.o; text: .text%__1cJMarkSweepUAdjustPointerClosureGdo_oop6MppnHoopDesc__v_: markSweep.o;
text: .text%__1cNobjArrayKlassToop_follow_contents6MpnHoopDesc__v_; text: .text%__1cNobjArrayKlassToop_follow_contents6MpnHoopDesc__v_;
@ -45,7 +43,6 @@ text: .text%__1cHPhiNodeGOpcode6kM_i_;
text: .text%__1cKbranchNodeNis_block_proj6kM_pknENode__: ad_sparc_misc.o; text: .text%__1cKbranchNodeNis_block_proj6kM_pknENode__: ad_sparc_misc.o;
text: .text%__1cIProjNodeGOpcode6kM_i_; text: .text%__1cIProjNodeGOpcode6kM_i_;
text: .text%__1cETypeIhashcons6M_pk0_; text: .text%__1cETypeIhashcons6M_pk0_;
text: .text%__1cOPhaseIdealLoopUbuild_loop_late_post6MpnENode_pk0_v_;
text: .text%__1cMPhaseChaitinTinterfere_with_live6MIpnIIndexSet__v_; text: .text%__1cMPhaseChaitinTinterfere_with_live6MIpnIIndexSet__v_;
text: .text%__1cWNode_Backward_IteratorEnext6M_pnENode__; text: .text%__1cWNode_Backward_IteratorEnext6M_pnENode__;
text: .text%__1cNIdealLoopTreeJis_member6kMpk0_i_; text: .text%__1cNIdealLoopTreeJis_member6kMpk0_i_;
@ -154,7 +151,6 @@ text: .text%__1cETypeFxmeet6kMpk0_2_;
text: .text%__1cILRG_ListGextend6MII_v_; text: .text%__1cILRG_ListGextend6MII_v_;
text: .text%__1cJVectorSet2F6kMI_i_; text: .text%__1cJVectorSet2F6kMI_i_;
text: .text%__1cENodeQIdeal_DU_postCCP6MpnIPhaseCCP__p0_; text: .text%__1cENodeQIdeal_DU_postCCP6MpnIPhaseCCP__p0_;
text: .text%__1cOtypeArrayKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cIProjNodeEhash6kM_I_; text: .text%__1cIProjNodeEhash6kM_I_;
text: .text%__1cIAddINodeGOpcode6kM_i_; text: .text%__1cIAddINodeGOpcode6kM_i_;
text: .text%__1cIIndexSet2t6Mp0_v_; text: .text%__1cIIndexSet2t6Mp0_v_;
@ -168,7 +164,6 @@ text: .text%__1cICmpPNodeGOpcode6kM_i_;
text: .text%__1cKNode_ArrayGremove6MI_v_; text: .text%__1cKNode_ArrayGremove6MI_v_;
text: .text%__1cHPhiNodeEhash6kM_I_; text: .text%__1cHPhiNodeEhash6kM_I_;
text: .text%__1cLSymbolTableGlookup6FpkcipnGThread__pnNsymbolOopDesc__; text: .text%__1cLSymbolTableGlookup6FpkcipnGThread__pnNsymbolOopDesc__;
text: .text%__1cKoopFactoryKnew_symbol6FpkcipnGThread__pnNsymbolOopDesc__;
text: .text%__1cKmethodOperJnum_edges6kM_I_: ad_sparc.o; text: .text%__1cKmethodOperJnum_edges6kM_I_: ad_sparc.o;
text: .text%__1cJStartNodeLbottom_type6kM_pknEType__; text: .text%__1cJStartNodeLbottom_type6kM_pknEType__;
text: .text%__1cHTypeIntFxmeet6kMpknEType__3_; text: .text%__1cHTypeIntFxmeet6kMpknEType__3_;
@ -502,7 +497,6 @@ text: .text%__1cNObjectMonitorFenter6MpnGThread__v_;
text: .text%__1cENodeKreplace_by6Mp0_v_; text: .text%__1cENodeKreplace_by6Mp0_v_;
text: .text%__1cSObjectSynchronizerJslow_exit6FpnHoopDesc_pnJBasicLock_pnGThread__v_; text: .text%__1cSObjectSynchronizerJslow_exit6FpnHoopDesc_pnJBasicLock_pnGThread__v_;
text: .text%__1cMMergeMemNodePiteration_setup6Mpk0_v_; text: .text%__1cMMergeMemNodePiteration_setup6Mpk0_v_;
text: .text%__1cFKlassNlookup_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cKDictionaryEfind6MiInMsymbolHandle_nGHandle_2pnGThread__pnMklassOopDesc__; text: .text%__1cKDictionaryEfind6MiInMsymbolHandle_nGHandle_2pnGThread__pnMklassOopDesc__;
text: .text%__1cRMachSpillCopyNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cRMachSpillCopyNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cKRegionNodeIIdentity6MpnOPhaseTransform__pnENode__; text: .text%__1cKRegionNodeIIdentity6MpnOPhaseTransform__pnENode__;
@ -605,7 +599,6 @@ text: .text%__1cMTypeKlassPtrEhash6kM_i_;
text: .text%__1cMCallLeafNodeGOpcode6kM_i_; text: .text%__1cMCallLeafNodeGOpcode6kM_i_;
text: .text%__1cSCallLeafDirectNodeIpipeline6kM_pknIPipeline__; text: .text%__1cSCallLeafDirectNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cHPhiNodeEmake6FpnENode_2pknEType_pknHTypePtr__p0_; text: .text%__1cHPhiNodeEmake6FpnENode_2pknEType_pknHTypePtr__p0_;
text: .text%__1cIAddPNodeQmach_bottom_type6FpknIMachNode__pknEType__;
text: .text%__1cOcompU_iRegNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cOcompU_iRegNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cJiRegLOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cJiRegLOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cNflagsRegPOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cNflagsRegPOperKin_RegMask6kMi_pknHRegMask__;
@ -640,7 +633,6 @@ text: .text%__1cJStartNodeGOpcode6kM_i_;
text: .text%__1cQregF_to_stkINodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cQregF_to_stkINodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cENodeDcmp6kMrk0_I_; text: .text%__1cENodeDcmp6kMrk0_I_;
text: .text%__1cHTypeIntFxdual6kM_pknEType__; text: .text%__1cHTypeIntFxdual6kM_pknEType__;
text: .text%__1cIciObjectIencoding6M_pnI_jobject__;
text: .text%__1cMmerge_region6FpnKRegionNode_pnIPhaseGVN__pnENode__: cfgnode.o; text: .text%__1cMmerge_region6FpnKRegionNode_pnIPhaseGVN__pnENode__: cfgnode.o;
text: .text%__1cJAssemblerOpatched_branch6Fiii_i_; text: .text%__1cJAssemblerOpatched_branch6Fiii_i_;
text: .text%__1cJAssemblerSbranch_destination6Fii_i_; text: .text%__1cJAssemblerSbranch_destination6Fii_i_;
@ -738,10 +730,7 @@ text: .text%__1cMLinkResolverOresolve_invoke6FrnICallInfo_nGHandle_nSconstantPoo
text: .text%__1cIBoolNodeJideal_reg6kM_I_: subnode.o; text: .text%__1cIBoolNodeJideal_reg6kM_I_: subnode.o;
text: .text%__1cHCmpNodeJideal_reg6kM_I_: classes.o; text: .text%__1cHCmpNodeJideal_reg6kM_I_: classes.o;
text: .text%__1cRloadConP_pollNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cRloadConP_pollNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cETypeFwiden6kMpk0_2_: type.o;
text: .text%__1cLstoreI0NodeIpipeline6kM_pknIPipeline__; text: .text%__1cLstoreI0NodeIpipeline6kM_pknIPipeline__;
text: .text%__1cFciEnvTget_method_by_index6MpnPciInstanceKlass_inJBytecodesECode__pnIciMethod__;
text: .text%__1cFciEnvYget_method_by_index_impl6MpnPciInstanceKlass_inJBytecodesECode__pnIciMethod__;
text: .text%__1cMloadConPNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cMloadConPNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cFciEnvNlookup_method6MpnNinstanceKlass_2pnNsymbolOopDesc_4nJBytecodesECode__pnNmethodOopDesc__; text: .text%__1cFciEnvNlookup_method6MpnNinstanceKlass_2pnNsymbolOopDesc_4nJBytecodesECode__pnNmethodOopDesc__;
text: .text%__1cKDictionaryKfind_class6MiInMsymbolHandle_nGHandle__pnMklassOopDesc__; text: .text%__1cKDictionaryKfind_class6MiInMsymbolHandle_nGHandle__pnMklassOopDesc__;
@ -755,7 +744,6 @@ text: .text%__1cSInterpreterRuntimeMmonitorenter6FpnKJavaThread_pnPBasicObjectLo
text: .text%__1cSInterpreterRuntimePresolve_get_put6FpnKJavaThread_nJBytecodesECode__v_; text: .text%__1cSInterpreterRuntimePresolve_get_put6FpnKJavaThread_nJBytecodesECode__v_;
text: .text%__1cQsubI_reg_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cQsubI_reg_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cXmembar_acquire_lockNodeIpipeline6kM_pknIPipeline__; text: .text%__1cXmembar_acquire_lockNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cQaddP_reg_regNodeLbottom_type6kM_pknEType__: ad_sparc_misc.o;
text: .text%__1cPCountedLoopNodeGOpcode6kM_i_; text: .text%__1cPCountedLoopNodeGOpcode6kM_i_;
text: .text%__1cSInterpreterRuntimeLmonitorexit6FpnKJavaThread_pnPBasicObjectLock__v_; text: .text%__1cSInterpreterRuntimeLmonitorexit6FpnKJavaThread_pnPBasicObjectLock__v_;
text: .text%__1cIAndLNodeGOpcode6kM_i_; text: .text%__1cIAndLNodeGOpcode6kM_i_;
@ -826,7 +814,6 @@ text: .text%__1cURethrowExceptionNodeNis_block_proj6kM_pknENode__: ad_sparc_misc
text: .text%__1cQSystemDictionarybOfind_constrained_instance_or_array_klass6FnMsymbolHandle_nGHandle_pnGThread__pnMklassOopDesc__; text: .text%__1cQSystemDictionarybOfind_constrained_instance_or_array_klass6FnMsymbolHandle_nGHandle_pnGThread__pnMklassOopDesc__;
text: .text%__1cQsubI_reg_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cQsubI_reg_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cJloadBNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cJloadBNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cIciSymbol2t6MnMsymbolHandle__v_;
text: .text%__1cQaddP_reg_regNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cQaddP_reg_regNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cKmethodOperGmethod6kM_i_: ad_sparc.o; text: .text%__1cKmethodOperGmethod6kM_i_: ad_sparc.o;
text: .text%__1cFKlassIsubklass6kM_p0_; text: .text%__1cFKlassIsubklass6kM_p0_;
@ -946,7 +933,6 @@ text: .text%__1cIGraphKitOreplace_in_map6MpnENode_2_v_;
text: .text%__1cNinstanceKlassLfind_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__; text: .text%__1cNinstanceKlassLfind_method6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cHCompileKTracePhase2T6M_v_; text: .text%__1cHCompileKTracePhase2T6M_v_;
text: .text%__1cMPhaseChaitinLclone_projs6MpnFBlock_IpnENode_4rI_i_; text: .text%__1cMPhaseChaitinLclone_projs6MpnFBlock_IpnENode_4rI_i_;
text: .text%__1cNinstanceKlassSlookup_osr_nmethod6kMkpnNmethodOopDesc_i_pnHnmethod__;
text: .text%__1cIJVMState2t6MpnIciMethod_p0_v_; text: .text%__1cIJVMState2t6MpnIciMethod_p0_v_;
text: .text%__1cIHaltNode2t6MpnENode_2_v_; text: .text%__1cIHaltNode2t6MpnENode_2_v_;
text: .text%__1cLOptoRuntimeSuncommon_trap_Type6F_pknITypeFunc__; text: .text%__1cLOptoRuntimeSuncommon_trap_Type6F_pknITypeFunc__;
@ -954,7 +940,6 @@ text: .text%__1cJloadLNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cSsafePoint_pollNodePoper_input_base6kM_I_: ad_sparc_misc.o; text: .text%__1cSsafePoint_pollNodePoper_input_base6kM_I_: ad_sparc_misc.o;
text: .text%__1cINodeHashJhash_find6MpknENode__p1_; text: .text%__1cINodeHashJhash_find6MpknENode__p1_;
text: .text%__1cQmulL_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cQmulL_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cSaddP_reg_imm13NodeLbottom_type6kM_pknEType__: ad_sparc_misc.o;
text: .text%__1cOMethodLivenessKBasicBlock2t6Mp0ii_v_; text: .text%__1cOMethodLivenessKBasicBlock2t6Mp0ii_v_;
text: .text%__1cOMethodLivenessKBasicBlockQcompute_gen_kill6MpnIciMethod__v_; text: .text%__1cOMethodLivenessKBasicBlockQcompute_gen_kill6MpnIciMethod__v_;
text: .text%__1cOGenerateOopMapFppush6MpnNCellTypeState__v_; text: .text%__1cOGenerateOopMapFppush6MpnNCellTypeState__v_;
@ -987,7 +972,6 @@ text: .text%__1cFParseHdo_call6M_v_;
text: .text%__1cNloadConP0NodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cNloadConP0NodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cHMulNodeFValue6kMpnOPhaseTransform__pknEType__; text: .text%__1cHMulNodeFValue6kMpnOPhaseTransform__pknEType__;
text: .text%__1cMPhaseIterGVNJtransform6MpnENode__2_; text: .text%__1cMPhaseIterGVNJtransform6MpnENode__2_;
text: .text%__1cHTypeIntFwiden6kMpknEType__3_;
text: .text%__1cSsafePoint_pollNodeIpipeline6kM_pknIPipeline__; text: .text%__1cSsafePoint_pollNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cJloadSNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cJloadSNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cKarrayKlassLobject_size6kMi_i_; text: .text%__1cKarrayKlassLobject_size6kMi_i_;
@ -1019,7 +1003,6 @@ text: .text%__1cIBoolNodeHsize_of6kM_I_;
text: .text%__1cSobjArrayKlassKlassIoop_size6kMpnHoopDesc__i_: objArrayKlassKlass.o; text: .text%__1cSobjArrayKlassKlassIoop_size6kMpnHoopDesc__i_: objArrayKlassKlass.o;
text: .text%__1cPcompP_iRegPNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cPcompP_iRegPNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cJloadPNodeOmemory_operand6kM_pknIMachOper__; text: .text%__1cJloadPNodeOmemory_operand6kM_pknIMachOper__;
text: .text%__1cPBytecode_invokeJsignature6kM_pnNsymbolOopDesc__;
text: .text%__1cFframebGinterpreter_callee_receiver_addr6MnMsymbolHandle__ppnHoopDesc__; text: .text%__1cFframebGinterpreter_callee_receiver_addr6MnMsymbolHandle__ppnHoopDesc__;
text: .text%__1cNSignatureInfoGdo_int6M_v_: bytecode.o; text: .text%__1cNSignatureInfoGdo_int6M_v_: bytecode.o;
text: .text%__1cOstackSlotLOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cOstackSlotLOperKin_RegMask6kMi_pknHRegMask__;
@ -1038,9 +1021,7 @@ text: .text%__1cMloadConINodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cGIfNodeHsize_of6kM_I_: classes.o; text: .text%__1cGIfNodeHsize_of6kM_I_: classes.o;
text: .text%__1cPconvL2I_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cPconvL2I_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cIimmLOperJconstantL6kM_x_: ad_sparc_clone.o; text: .text%__1cIimmLOperJconstantL6kM_x_: ad_sparc_clone.o;
text: .text%__1cTStackWalkCompPolicyRcompilation_level6MnMmethodHandle_i_i_;
text: .text%jni_GetByteArrayRegion: jni.o; text: .text%jni_GetByteArrayRegion: jni.o;
text: .text%__1cIGraphKitTset_all_memory_call6MpnENode__v_;
text: .text%__1cSHighResTimeSamplerLtake_sample6M_x_: statSampler.o; text: .text%__1cSHighResTimeSamplerLtake_sample6M_x_: statSampler.o;
text: .text%__1cHCompileFstart6kM_pnJStartNode__; text: .text%__1cHCompileFstart6kM_pnJStartNode__;
text: .text%__1cPStatSamplerTaskEtask6M_v_: statSampler.o; text: .text%__1cPStatSamplerTaskEtask6M_v_: statSampler.o;
@@ -1082,7 +1063,6 @@ text: .text%__1cXinitialize_static_field6FpnPfieldDescriptor_pnGThread__v_: clas
text: .text%__1cURethrowExceptionNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cURethrowExceptionNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cOJNIHandleBlockOallocate_block6FpnGThread__p0_; text: .text%__1cOJNIHandleBlockOallocate_block6FpnGThread__p0_;
text: .text%__1cNSignatureInfoHdo_bool6M_v_: bytecode.o; text: .text%__1cNSignatureInfoHdo_bool6M_v_: bytecode.o;
text: .text%__1cKBufferBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cSandI_reg_imm13NodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cSandI_reg_imm13NodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cIAddINodeIadd_ring6kMpknEType_3_3_; text: .text%__1cIAddINodeIadd_ring6kMpknEType_3_3_;
text: .text%__1cLTypeInstPtrQcast_to_ptr_type6kMnHTypePtrDPTR__pknEType__; text: .text%__1cLTypeInstPtrQcast_to_ptr_type6kMnHTypePtrDPTR__pknEType__;
@@ -1095,7 +1075,6 @@ text: .text%__1cPorI_reg_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cLRethrowNodeGOpcode6kM_i_; text: .text%__1cLRethrowNodeGOpcode6kM_i_;
text: .text%__1cJloadSNodeIpipeline6kM_pknIPipeline__; text: .text%__1cJloadSNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cICodeHeapIcapacity6kM_I_; text: .text%__1cICodeHeapIcapacity6kM_I_;
text: .text%__1cKMemoryPoolImax_size6kM_I_: memoryPool.o;
text: .text%__1cMCodeHeapPoolNused_in_bytes6M_I_: memoryPool.o; text: .text%__1cMCodeHeapPoolNused_in_bytes6M_I_: memoryPool.o;
text: .text%__1cPcmpFastLockNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cPcmpFastLockNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cMCodeHeapPoolQget_memory_usage6M_nLMemoryUsage__; text: .text%__1cMCodeHeapPoolQget_memory_usage6M_nLMemoryUsage__;
@@ -1136,7 +1115,6 @@ text: .text%__1cJloadSNodePoper_input_base6kM_I_: ad_sparc_misc.o;
text: .text%__1cIJumpDataKcell_count6M_i_: ciMethodData.o; text: .text%__1cIJumpDataKcell_count6M_i_: ciMethodData.o;
text: .text%__1cObranchConPNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cObranchConPNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cITypeFuncFxdual6kM_pknEType__; text: .text%__1cITypeFuncFxdual6kM_pknEType__;
text: .text%__1cQjava_lang_StringGlength6FpnHoopDesc__i_;
text: .text%__1cFStateM_sub_Op_CmpI6MpknENode__v_; text: .text%__1cFStateM_sub_Op_CmpI6MpknENode__v_;
text: .text%__1cJcmpOpOperFccode6kM_i_: ad_sparc_clone.o; text: .text%__1cJcmpOpOperFccode6kM_i_: ad_sparc_clone.o;
text: .text%__1cGciType2t6MnLKlassHandle__v_; text: .text%__1cGciType2t6MnLKlassHandle__v_;
@@ -1200,7 +1178,6 @@ text: .text%__1cQmulL_reg_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cLstoreP0NodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cLstoreP0NodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cLRethrowNodeKmatch_edge6kMI_I_; text: .text%__1cLRethrowNodeKmatch_edge6kMI_I_;
text: .text%__1cFTypeFEhash6kM_i_; text: .text%__1cFTypeFEhash6kM_i_;
text: .text%__1cHnmethodHoops_do6MpnKOopClosure__v_;
text: .text%__1cFStateM_sub_Op_AddI6MpknENode__v_; text: .text%__1cFStateM_sub_Op_AddI6MpknENode__v_;
text: .text%__1cOParseGeneratorIgenerate6MpnIJVMState__2_; text: .text%__1cOParseGeneratorIgenerate6MpnIJVMState__2_;
text: .text%__1cFParseQcreate_entry_map6M_pnNSafePointNode__; text: .text%__1cFParseQcreate_entry_map6M_pnNSafePointNode__;
@@ -1213,17 +1190,13 @@ text: .text%__1cFParsePdo_method_entry6M_v_;
text: .text%__1cNCallGeneratorKfor_inline6FpnIciMethod_f_p0_; text: .text%__1cNCallGeneratorKfor_inline6FpnIciMethod_f_p0_;
text: .text%__1cbGJvmtiVMObjectAllocEventCollector2t6M_v_; text: .text%__1cbGJvmtiVMObjectAllocEventCollector2t6M_v_;
text: .text%__1cbGJvmtiVMObjectAllocEventCollector2T6M_v_; text: .text%__1cbGJvmtiVMObjectAllocEventCollector2T6M_v_;
text: .text%__1cQconstMethodKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cRciVirtualCallDataOtranslate_from6MpnLProfileData__v_; text: .text%__1cRciVirtualCallDataOtranslate_from6MpnLProfileData__v_;
text: .text%jni_IsSameObject: jni.o; text: .text%jni_IsSameObject: jni.o;
text: .text%__1cMloadConINodeIpipeline6kM_pknIPipeline__; text: .text%__1cMloadConINodeIpipeline6kM_pknIPipeline__;
text: .text%__1cNbranchConNodeJlabel_set6MrnFLabel_I_v_; text: .text%__1cNbranchConNodeJlabel_set6MrnFLabel_I_v_;
text: .text%__1cNbranchConNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cNbranchConNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cQandL_reg_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cQandL_reg_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cLmethodKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cLsymbolKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cIciObjectFklass6M_pnHciKlass__; text: .text%__1cIciObjectFklass6M_pnHciKlass__;
text: .text%__1cLSymbolTableFprobe6Fpkci_pnNsymbolOopDesc__;
text: .text%__1cPThreadLocalNodeGOpcode6kM_i_; text: .text%__1cPThreadLocalNodeGOpcode6kM_i_;
text: .text%__1cZPhaseConservativeCoalesceKupdate_ifg6MIIpnIIndexSet_2_v_; text: .text%__1cZPhaseConservativeCoalesceKupdate_ifg6MIIpnIIndexSet_2_v_;
text: .text%__1cZPhaseConservativeCoalesceMunion_helper6MpnENode_2II222pnFBlock_I_v_; text: .text%__1cZPhaseConservativeCoalesceMunion_helper6MpnENode_2II222pnFBlock_I_v_;
@@ -1244,7 +1217,6 @@ text: .text%__1cRshrP_reg_imm5NodeIpipeline6kM_pknIPipeline__;
text: .text%__1cQandI_reg_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cQandI_reg_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cIciMethodbBinterpreter_call_site_count6Mi_i_; text: .text%__1cIciMethodbBinterpreter_call_site_count6Mi_i_;
text: .text%__1cGBitMapIset_from6M0_v_; text: .text%__1cGBitMapIset_from6M0_v_;
text: .text%__1cNCompileBrokerOcompile_method6FnMmethodHandle_i1ipkcpnGThread__pnHnmethod__;
text: .text%__1cTconstantPoolOopDescbDresolve_string_constants_impl6FnSconstantPoolHandle_pnGThread__v_; text: .text%__1cTconstantPoolOopDescbDresolve_string_constants_impl6FnSconstantPoolHandle_pnGThread__v_;
text: .text%__1cHSubNodeIIdentity6MpnOPhaseTransform__pnENode__; text: .text%__1cHSubNodeIIdentity6MpnOPhaseTransform__pnENode__;
text: .text%__1cFChunk2n6FII_pv_; text: .text%__1cFChunk2n6FII_pv_;
@@ -1278,7 +1250,6 @@ text: .text%__1cNCatchProjNodeDcmp6kMrknENode__I_;
text: .text%__1cKTypeOopPtrEhash6kM_i_; text: .text%__1cKTypeOopPtrEhash6kM_i_;
text: .text%__1cIMinINodeGOpcode6kM_i_; text: .text%__1cIMinINodeGOpcode6kM_i_;
text: .text%__1cMURShiftINodeIIdentity6MpnOPhaseTransform__pnENode__; text: .text%__1cMURShiftINodeIIdentity6MpnOPhaseTransform__pnENode__;
text: .text%__1cFframeRoops_code_blob_do6MpnKOopClosure_pknLRegisterMap__v_;
text: .text%__1cKTypeRawPtrFxmeet6kMpknEType__3_; text: .text%__1cKTypeRawPtrFxmeet6kMpknEType__3_;
text: .text%JVM_GetMethodIxModifiers; text: .text%JVM_GetMethodIxModifiers;
text: .text%__1cIMulLNodeLbottom_type6kM_pknEType__: classes.o; text: .text%__1cIMulLNodeLbottom_type6kM_pknEType__: classes.o;
@@ -1325,7 +1296,6 @@ text: .text%__1cSconvI2D_helperNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cPClassFileStreamHskip_u26MipnGThread__v_; text: .text%__1cPClassFileStreamHskip_u26MipnGThread__v_;
text: .text%__1cUcompI_iReg_imm13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cUcompI_iReg_imm13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cOMacroAssemblerNverify_thread6M_v_; text: .text%__1cOMacroAssemblerNverify_thread6M_v_;
text: .text%__1cIGraphKitZset_results_for_java_call6MpnMCallJavaNode__pnENode__;
text: .text%__1cSbranchCon_longNodeIpipeline6kM_pknIPipeline__; text: .text%__1cSbranchCon_longNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cHnmethodVcleanup_inline_caches6M_v_; text: .text%__1cHnmethodVcleanup_inline_caches6M_v_;
text: .text%__1cTciConstantPoolCacheGinsert6Mipv_v_; text: .text%__1cTciConstantPoolCacheGinsert6Mipv_v_;
@@ -1356,12 +1326,9 @@ text: .text%__1cKoopFactoryTnew_system_objArray6FipnGThread__pnPobjArrayOopDesc_
text: .text%__1cbDcatch_cleanup_find_cloned_def6FpnFBlock_pnENode_1rnLBlock_Array_i_3_: lcm.o; text: .text%__1cbDcatch_cleanup_find_cloned_def6FpnFBlock_pnENode_1rnLBlock_Array_i_3_: lcm.o;
text: .text%__1cQxorI_reg_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cQxorI_reg_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cKstoreLNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cKstoreLNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cFciEnvVget_constant_by_index6MpnPciInstanceKlass_i_nKciConstant__;
text: .text%__1cFciEnvbAget_constant_by_index_impl6MpnPciInstanceKlass_i_nKciConstant__;
text: .text%__1cOClearArrayNodeKmatch_edge6kMI_I_; text: .text%__1cOClearArrayNodeKmatch_edge6kMI_I_;
text: .text%__1cPconvL2I_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cPconvL2I_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cJloadSNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cJloadSNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cKJavaThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cSFixupMirrorClosureJdo_object6MpnHoopDesc__v_: universe.o; text: .text%__1cSFixupMirrorClosureJdo_object6MpnHoopDesc__v_: universe.o;
text: .text%__1cFStateP_sub_Op_LShiftI6MpknENode__v_; text: .text%__1cFStateP_sub_Op_LShiftI6MpknENode__v_;
text: .text%__1cQandL_reg_regNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cQandL_reg_regNodeLout_RegMask6kM_rknHRegMask__;
@@ -1387,7 +1354,6 @@ text: .text%__1cOAbstractICacheQinvalidate_range6FpCi_v_;
text: .text%__1cKstorePNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cKstorePNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cIMaxINodeGOpcode6kM_i_; text: .text%__1cIMaxINodeGOpcode6kM_i_;
text: .text%__1cTDirectCallGeneratorIgenerate6MpnIJVMState__2_; text: .text%__1cTDirectCallGeneratorIgenerate6MpnIJVMState__2_;
text: .text%__1cNCallGeneratorPfor_direct_call6FpnIciMethod__p0_;
text: .text%__1cMWarmCallInfoLalways_cold6F_p0_; text: .text%__1cMWarmCallInfoLalways_cold6F_p0_;
text: .text%__1cIimmDOperJconstantD6kM_d_: ad_sparc_clone.o; text: .text%__1cIimmDOperJconstantD6kM_d_: ad_sparc_clone.o;
text: .text%__1cIPhaseIFGEinit6MI_v_; text: .text%__1cIPhaseIFGEinit6MI_v_;
@@ -1512,7 +1478,6 @@ text: .text%__1cPciObjectFactoryMvm_symbol_at6Fi_pnIciSymbol__;
text: .text%__1cKDictionaryJadd_klass6MnMsymbolHandle_nGHandle_nLKlassHandle__v_; text: .text%__1cKDictionaryJadd_klass6MnMsymbolHandle_nGHandle_nLKlassHandle__v_;
text: .text%__1cVshrL_reg_imm6_L2INodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cVshrL_reg_imm6_L2INodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cZCallDynamicJavaDirectNodePoper_input_base6kM_I_: ad_sparc_misc.o; text: .text%__1cZCallDynamicJavaDirectNodePoper_input_base6kM_I_: ad_sparc_misc.o;
text: .text%__1cIGraphKitTcreate_and_xform_if6MpnENode_2ff_pnGIfNode__: graphKit.o;
text: .text%__1cWImplicitExceptionTableGappend6MII_v_; text: .text%__1cWImplicitExceptionTableGappend6MII_v_;
text: .text%__1cRMachNullCheckNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cRMachNullCheckNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cLProfileDataPpost_initialize6MpnOBytecodeStream_pnRmethodDataOopDesc__v_: ciMethodData.o; text: .text%__1cLProfileDataPpost_initialize6MpnOBytecodeStream_pnRmethodDataOopDesc__v_: ciMethodData.o;
@@ -1534,14 +1499,12 @@ text: .text%__1cFKlassMset_subklass6MpnMklassOopDesc__v_;
text: .text%__1cOGenerateOopMapLmerge_state6Fp0ipi_v_; text: .text%__1cOGenerateOopMapLmerge_state6Fp0ipi_v_;
text: .text%__1cMTypeKlassPtrFxdual6kM_pknEType__; text: .text%__1cMTypeKlassPtrFxdual6kM_pknEType__;
text: .text%__1cQSystemDictionaryVdefine_instance_class6FnTinstanceKlassHandle_pnGThread__v_; text: .text%__1cQSystemDictionaryVdefine_instance_class6FnTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cSinstanceKlassKlassXallocate_instance_klass6MiiiinNReferenceType_pnGThread__pnMklassOopDesc__;
text: .text%__1cPClassFileParserbBcheck_final_method_override6FnTinstanceKlassHandle_pnGThread__v_; text: .text%__1cPClassFileParserbBcheck_final_method_override6FnTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cJCodeCachebKnumber_of_nmethods_with_dependencies6F_i_; text: .text%__1cJCodeCachebKnumber_of_nmethods_with_dependencies6F_i_;
text: .text%__1cNinstanceKlassQinit_implementor6M_v_; text: .text%__1cNinstanceKlassQinit_implementor6M_v_;
text: .text%__1cPClassFileStream2t6MpCipc_v_; text: .text%__1cPClassFileStream2t6MpCipc_v_;
text: .text%__1cNinstanceKlassSprocess_interfaces6MpnGThread__v_; text: .text%__1cNinstanceKlassSprocess_interfaces6MpnGThread__v_;
text: .text%__1cNinstanceKlassYcompute_secondary_supers6MipnGThread__pnPobjArrayOopDesc__; text: .text%__1cNinstanceKlassYcompute_secondary_supers6MipnGThread__pnPobjArrayOopDesc__;
text: .text%__1cKoopFactoryRnew_instanceKlass6FiiiinNReferenceType_pnGThread__pnMklassOopDesc__;
text: .text%__1cNinstanceKlassWdo_local_static_fields6MpFpnPfieldDescriptor_pnGThread__v4_v_; text: .text%__1cNinstanceKlassWdo_local_static_fields6MpFpnPfieldDescriptor_pnGThread__v4_v_;
text: .text%__1cPClassFileParserMsort_methods6MnOobjArrayHandle_111pnGThread__nPtypeArrayHandle__; text: .text%__1cPClassFileParserMsort_methods6MnOobjArrayHandle_111pnGThread__nPtypeArrayHandle__;
text: .text%__1cFKlassKsuperklass6kM_pnNinstanceKlass__; text: .text%__1cFKlassKsuperklass6kM_pnNinstanceKlass__;
@@ -1561,22 +1524,17 @@ text: .text%__1cISubINodeJideal_reg6kM_I_: classes.o;
text: .text%__1cNinstanceKlassOset_alloc_size6MI_v_: instanceKlass.o; text: .text%__1cNinstanceKlassOset_alloc_size6MI_v_: instanceKlass.o;
text: .text%__1cNinstanceKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceKlass.o; text: .text%__1cNinstanceKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceKlass.o;
text: .text%__1cHMemNodeHsize_of6kM_I_; text: .text%__1cHMemNodeHsize_of6kM_I_;
text: .text%__1cFVTuneQstart_class_load6F_v_;
text: .text%__1cSThreadProfilerMark2T6M_v_; text: .text%__1cSThreadProfilerMark2T6M_v_;
text: .text%__1cFVTuneOend_class_load6F_v_;
text: .text%__1cLClassLoaderOload_classfile6FnMsymbolHandle_pnGThread__nTinstanceKlassHandle__; text: .text%__1cLClassLoaderOload_classfile6FnMsymbolHandle_pnGThread__nTinstanceKlassHandle__;
text: .text%__1cJEventMark2t6MpkcE_v_: classLoader.o; text: .text%__1cJEventMark2t6MpkcE_v_: classLoader.o;
text: .text%__1cSThreadProfilerMark2t6Mn0AGRegion__v_; text: .text%__1cSThreadProfilerMark2t6Mn0AGRegion__v_;
text: .text%__1cQSystemDictionaryRload_shared_class6FnTinstanceKlassHandle_nGHandle_pnGThread__1_; text: .text%__1cQSystemDictionaryRload_shared_class6FnTinstanceKlassHandle_nGHandle_pnGThread__1_;
text: .text%__1cKklassKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cPClassFileParserbKparse_classfile_sourcefile_attribute6MnSconstantPoolHandle_nTinstanceKlassHandle_pnGThread__v_; text: .text%__1cPClassFileParserbKparse_classfile_sourcefile_attribute6MnSconstantPoolHandle_nTinstanceKlassHandle_pnGThread__v_;
text: .text%__1cQmodI_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cQmodI_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cLRShiftINodeLbottom_type6kM_pknEType__: classes.o; text: .text%__1cLRShiftINodeLbottom_type6kM_pknEType__: classes.o;
text: .text%__1cKCMoveINodeGOpcode6kM_i_; text: .text%__1cKCMoveINodeGOpcode6kM_i_;
text: .text%__1cLLShiftLNodeGOpcode6kM_i_; text: .text%__1cLLShiftLNodeGOpcode6kM_i_;
text: .text%__1cYcompareAndSwapL_boolNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cYcompareAndSwapL_boolNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cSinstanceKlassKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cNinstanceKlassScopy_static_fields6MpnSPSPromotionManager__v_;
text: .text%__1cMtlsLoadPNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cMtlsLoadPNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cFStateQ_sub_Op_URShiftI6MpknENode__v_; text: .text%__1cFStateQ_sub_Op_URShiftI6MpknENode__v_;
text: .text%__1cKcmpOpUOperGnegate6M_v_: ad_sparc_clone.o; text: .text%__1cKcmpOpUOperGnegate6M_v_: ad_sparc_clone.o;
@@ -1626,7 +1584,6 @@ text: .text%__1cTOopMapForCacheEntryZfill_stackmap_for_opcodes6MpnOBytecodeStrea
text: .text%__1cSaddL_reg_imm13NodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cSaddL_reg_imm13NodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cQshrL_reg_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cQshrL_reg_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cKstoreLNodeIpipeline6kM_pknIPipeline__; text: .text%__1cKstoreLNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cNSharedRuntimebKexception_handler_for_return_address6FpC_1_;
text: .text%__1cILoopNodeHsize_of6kM_I_: classes.o; text: .text%__1cILoopNodeHsize_of6kM_I_: classes.o;
text: .text%__1cHMatcherLfind_shared6MpnENode__v_; text: .text%__1cHMatcherLfind_shared6MpnENode__v_;
text: .text%__1cJStartNodeHsize_of6kM_I_; text: .text%__1cJStartNodeHsize_of6kM_I_;
@@ -1643,8 +1600,6 @@ text: .text%__1cRcompL_reg_conNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cMLinkResolverbHlookup_instance_method_in_klasses6FrnMmethodHandle_nLKlassHandle_nMsymbolHandle_4pnGThread__v_; text: .text%__1cMLinkResolverbHlookup_instance_method_in_klasses6FrnMmethodHandle_nLKlassHandle_nMsymbolHandle_4pnGThread__v_;
text: .text%__1cMnegF_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cMnegF_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cNSharedRuntimebWnative_method_throw_unsatisfied_link_error_entry6F_pC_; text: .text%__1cNSharedRuntimebWnative_method_throw_unsatisfied_link_error_entry6F_pC_;
text: .text%__1cTStackWalkCompPolicyYmethod_back_branch_event6MnMmethodHandle_iipnGThread__v_;
text: .text%__1cRCompilationPolicybJreset_counter_for_back_branch_event6MnMmethodHandle__v_;
text: .text%__1cOMethodLivenessQcompute_liveness6M_v_; text: .text%__1cOMethodLivenessQcompute_liveness6M_v_;
text: .text%__1cOMethodLiveness2t6MpnFArena_pnIciMethod__v_; text: .text%__1cOMethodLiveness2t6MpnFArena_pnIciMethod__v_;
text: .text%__1cOMethodLivenessNinit_gen_kill6M_v_; text: .text%__1cOMethodLivenessNinit_gen_kill6M_v_;
@@ -1654,7 +1609,6 @@ text: .text%__1cIGraphKitHopt_iff6MpnENode_2_2_;
text: .text%__1cLRShiftINodeIIdentity6MpnOPhaseTransform__pnENode__; text: .text%__1cLRShiftINodeIIdentity6MpnOPhaseTransform__pnENode__;
text: .text%__1cJTimeStampGupdate6M_v_; text: .text%__1cJTimeStampGupdate6M_v_;
text: .text%__1cRmethodDataOopDescKmileage_of6FpnNmethodOopDesc__i_; text: .text%__1cRmethodDataOopDescKmileage_of6FpnNmethodOopDesc__i_;
text: .text%__1cWconstantPoolCacheKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cMloadConDNodeIpipeline6kM_pknIPipeline__; text: .text%__1cMloadConDNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cFParseQarray_addressing6MnJBasicType_ippknEType__pnENode__; text: .text%__1cFParseQarray_addressing6MnJBasicType_ippknEType__pnENode__;
text: .text%__1cNloadConP0NodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cNloadConP0NodeLout_RegMask6kM_rknHRegMask__;
@@ -1673,7 +1627,6 @@ text: .text%__1cWstatic_call_RelocationEtype6M_nJrelocInfoJrelocType__: relocInf
text: .text%__1cNflagsRegLOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cNflagsRegLOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cYciExceptionHandlerStreamPcount_remaining6M_i_; text: .text%__1cYciExceptionHandlerStreamPcount_remaining6M_i_;
text: .text%__1cFParseXcatch_inline_exceptions6MpnNSafePointNode__v_; text: .text%__1cFParseXcatch_inline_exceptions6MpnNSafePointNode__v_;
text: .text%__1cRconstantPoolKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cNobjArrayKlassKcopy_array6MpnMarrayOopDesc_i2iipnGThread__v_; text: .text%__1cNobjArrayKlassKcopy_array6MpnMarrayOopDesc_i2iipnGThread__v_;
text: .text%__1cKcmpOpUOperNgreater_equal6kM_i_: ad_sparc_clone.o; text: .text%__1cKcmpOpUOperNgreater_equal6kM_i_: ad_sparc_clone.o;
text: .text%JVM_GetFieldIxModifiers; text: .text%JVM_GetFieldIxModifiers;
@@ -1698,7 +1651,6 @@ text: .text%__1cKo0RegPOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cIregDOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cIregDOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cNmethodOopDescTverified_code_entry6M_pC_; text: .text%__1cNmethodOopDescTverified_code_entry6M_pC_;
text: .text%__1cNSharedRuntimeXfind_callee_info_helper6FpnKJavaThread_rnMvframeStream_rnJBytecodesECode_rnICallInfo_pnGThread__nGHandle__; text: .text%__1cNSharedRuntimeXfind_callee_info_helper6FpnKJavaThread_rnMvframeStream_rnJBytecodesECode_rnICallInfo_pnGThread__nGHandle__;
text: .text%__1cPBytecode_invokeFindex6kM_i_;
text: .text%__1cLRethrowNodeFValue6kMpnOPhaseTransform__pknEType__; text: .text%__1cLRethrowNodeFValue6kMpnOPhaseTransform__pknEType__;
text: .text%__1cSPSKeepAliveClosureGdo_oop6MppnHoopDesc__v_: psScavenge.o; text: .text%__1cSPSKeepAliveClosureGdo_oop6MppnHoopDesc__v_: psScavenge.o;
text: .text%__1cFParseFBlockRsuccessor_for_bci6Mi_p1_; text: .text%__1cFParseFBlockRsuccessor_for_bci6Mi_p1_;
@@ -1736,7 +1688,6 @@ text: .text%__1cJCodeCacheMfind_nmethod6Fpv_pnHnmethod__;
text: .text%__1cOPhaseIdealLoopMdominated_by6MpnENode_2_v_; text: .text%__1cOPhaseIdealLoopMdominated_by6MpnENode_2_v_;
text: .text%__1cQshlI_reg_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cQshlI_reg_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cFParseNthrow_to_exit6MpnNSafePointNode__v_; text: .text%__1cFParseNthrow_to_exit6MpnNSafePointNode__v_;
text: .text%__1cQinstanceRefKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cVConstantOopWriteValueIwrite_on6MpnUDebugInfoWriteStream__v_; text: .text%__1cVConstantOopWriteValueIwrite_on6MpnUDebugInfoWriteStream__v_;
text: .text%__1cJVectorSetGslamin6Mrk0_v_; text: .text%__1cJVectorSetGslamin6Mrk0_v_;
text: .text%JVM_Clone; text: .text%JVM_Clone;
@@ -1772,7 +1723,6 @@ text: .text%__1cXmembar_release_lockNodeIadr_type6kM_pknHTypePtr__;
text: .text%__1cJNode_ListEyank6MpnENode__v_; text: .text%__1cJNode_ListEyank6MpnENode__v_;
text: .text%__1cMPhaseChaitinISimplify6M_v_; text: .text%__1cMPhaseChaitinISimplify6M_v_;
text: .text%__1cNIdealLoopTreeIset_nest6MI_i_; text: .text%__1cNIdealLoopTreeIset_nest6MI_i_;
text: .text%__1cSCallLeafDirectNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cIMulLNodeImul_ring6kMpknEType_3_3_; text: .text%__1cIMulLNodeImul_ring6kMpknEType_3_3_;
text: .text%__1cMStartOSRNodeGOpcode6kM_i_; text: .text%__1cMStartOSRNodeGOpcode6kM_i_;
text: .text%__1cSCallLeafDirectNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cSCallLeafDirectNodeLout_RegMask6kM_rknHRegMask__;
@@ -1792,7 +1742,6 @@ text: .text%__1cSMemBarVolatileNodeGOpcode6kM_i_;
text: .text%__1cLstoreB0NodeOmemory_operand6kM_pknIMachOper__; text: .text%__1cLstoreB0NodeOmemory_operand6kM_pknIMachOper__;
text: .text%__1cRshrI_reg_imm5NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cRshrI_reg_imm5NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cQjava_lang_StringOas_utf8_string6FpnHoopDesc__pc_; text: .text%__1cQjava_lang_StringOas_utf8_string6FpnHoopDesc__pc_;
text: .text%__1cRcmpFastUnlockNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cNSafePointNodeLpop_monitor6M_v_; text: .text%__1cNSafePointNodeLpop_monitor6M_v_;
text: .text%__1cMPhaseChaitinVfind_base_for_derived6MppnENode_2rI_2_; text: .text%__1cMPhaseChaitinVfind_base_for_derived6MppnENode_2rI_2_;
text: .text%__1cLOptoRuntimebAcomplete_monitor_exit_Type6F_pknITypeFunc__; text: .text%__1cLOptoRuntimebAcomplete_monitor_exit_Type6F_pknITypeFunc__;
@@ -1824,8 +1773,6 @@ text: .text%__1cOGenerateOopMapGdo_ldc6Mii_v_;
text: .text%__1cJCMoveNodeLis_cmove_id6FpnOPhaseTransform_pnENode_44pnIBoolNode__4_; text: .text%__1cJCMoveNodeLis_cmove_id6FpnOPhaseTransform_pnENode_44pnIBoolNode__4_;
text: .text%__1cKTypeAryPtrQcast_to_ptr_type6kMnHTypePtrDPTR__pknEType__; text: .text%__1cKTypeAryPtrQcast_to_ptr_type6kMnHTypePtrDPTR__pknEType__;
text: .text%__1cOPhaseIdealLoopKDominators6M_v_; text: .text%__1cOPhaseIdealLoopKDominators6M_v_;
text: .text%__1cOPhaseIdealLoopPbuild_loop_late6MrnJVectorSet_rnJNode_List_rnKNode_Stack_pk0_v_;
text: .text%__1cOPhaseIdealLoopQbuild_loop_early6MrnJVectorSet_rnJNode_List_rnKNode_Stack_pk0_v_;
text: .text%jni_NewGlobalRef: jni.o; text: .text%jni_NewGlobalRef: jni.o;
text: .text%__1cTciConstantPoolCache2t6MpnFArena_i_v_; text: .text%__1cTciConstantPoolCache2t6MpnFArena_i_v_;
text: .text%__1cIAndINodeJideal_reg6kM_I_: classes.o; text: .text%__1cIAndINodeJideal_reg6kM_I_: classes.o;
@@ -1981,10 +1928,8 @@ text: .text%__1cFciEnvKcompile_id6M_I_;
text: .text%__1cPmethodDataKlassIallocate6MnMmethodHandle_pnGThread__pnRmethodDataOopDesc__; text: .text%__1cPmethodDataKlassIallocate6MnMmethodHandle_pnGThread__pnRmethodDataOopDesc__;
text: .text%__1cKoopFactoryOnew_methodData6FnMmethodHandle_pnGThread__pnRmethodDataOopDesc__; text: .text%__1cKoopFactoryOnew_methodData6FnMmethodHandle_pnGThread__pnRmethodDataOopDesc__;
text: .text%__1cIAndLNodeJideal_reg6kM_I_: classes.o; text: .text%__1cIAndLNodeJideal_reg6kM_I_: classes.o;
text: .text%__1cKCodeBuffer2t6MpCi_v_;
text: .text%__1cVshrL_reg_imm6_L2INodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cVshrL_reg_imm6_L2INodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cLConvL2INodeIIdentity6MpnOPhaseTransform__pnENode__; text: .text%__1cLConvL2INodeIIdentity6MpnOPhaseTransform__pnENode__;
text: .text%__1cIciMethodRinstructions_size6M_i_;
text: .text%__1cSmulI_reg_imm13NodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cSmulI_reg_imm13NodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cCosXthread_local_storage_at6Fi_pv_; text: .text%__1cCosXthread_local_storage_at6Fi_pv_;
text: .text%__1cMindIndexOperNconstant_disp6kM_i_: ad_sparc.o; text: .text%__1cMindIndexOperNconstant_disp6kM_i_: ad_sparc.o;
@@ -2013,7 +1958,6 @@ text: .text%__1cLOpaque2NodeLbottom_type6kM_pknEType__: connode.o;
text: .text%__1cSconvI2D_helperNodeIpipeline6kM_pknIPipeline__; text: .text%__1cSconvI2D_helperNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cUPSGenerationCountersKupdate_all6M_v_: psGenerationCounters.o; text: .text%__1cUPSGenerationCountersKupdate_all6M_v_: psGenerationCounters.o;
text: .text%__1cQComputeCallStackHdo_long6M_v_: generateOopMap.o; text: .text%__1cQComputeCallStackHdo_long6M_v_: generateOopMap.o;
text: .text%__1cKTypeOopPtrSmake_from_constant6FpnIciObject__pk0_;
text: .text%__1cQregP_to_stkPNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cQregP_to_stkPNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cOGenerateOopMapHppstore6MpnNCellTypeState_i_v_; text: .text%__1cOGenerateOopMapHppstore6MpnNCellTypeState_i_v_;
text: .text%__1cJTimeStampSticks_since_update6kM_x_; text: .text%__1cJTimeStampSticks_since_update6kM_x_;
@@ -2038,7 +1982,6 @@ text: .text%__1cIXorINodeGadd_id6kM_pknEType__: classes.o;
text: .text%__1cFStateM_sub_Op_AndI6MpknENode__v_; text: .text%__1cFStateM_sub_Op_AndI6MpknENode__v_;
text: .text%__1cVshrL_reg_imm6_L2INodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cVshrL_reg_imm6_L2INodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cKcmpOpFOperJnum_edges6kM_I_: ad_sparc_clone.o; text: .text%__1cKcmpOpFOperJnum_edges6kM_I_: ad_sparc_clone.o;
text: .text%__1cLRuntimeStubHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cTmembar_volatileNodeIpipeline6kM_pknIPipeline__; text: .text%__1cTmembar_volatileNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cJloadFNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cJloadFNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cFStateL_sub_Op_OrI6MpknENode__v_; text: .text%__1cFStateL_sub_Op_OrI6MpknENode__v_;
@@ -2066,7 +2009,6 @@ text: .text%__1cJttyLockerbCbreak_tty_lock_for_safepoint6Fi_v_;
text: .text%__1cSmembar_acquireNodeIadr_type6kM_pknHTypePtr__; text: .text%__1cSmembar_acquireNodeIadr_type6kM_pknHTypePtr__;
text: .text%__1cPorI_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cPorI_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cIPhaseCFGOinsert_goto_at6MII_v_; text: .text%__1cIPhaseCFGOinsert_goto_at6MII_v_;
text: .text%__1cITypeLongFwiden6kMpknEType__3_;
text: .text%__1cSThreadLocalStoragePget_thread_slow6F_pnGThread__; text: .text%__1cSThreadLocalStoragePget_thread_slow6F_pnGThread__;
text: .text%__1cPCallRuntimeNodeGOpcode6kM_i_; text: .text%__1cPCallRuntimeNodeGOpcode6kM_i_;
text: .text%__1cJcmpOpOperNgreater_equal6kM_i_: ad_sparc_clone.o; text: .text%__1cJcmpOpOperNgreater_equal6kM_i_: ad_sparc_clone.o;
@@ -2097,7 +2039,6 @@ text: .text%__1cSTailCalljmpIndNodePoper_input_base6kM_I_: ad_sparc_misc.o;
text: .text%__1cKstoreLNodeOmemory_operand6kM_pknIMachOper__; text: .text%__1cKstoreLNodeOmemory_operand6kM_pknIMachOper__;
text: .text%__1cGIfNodeMdominated_by6MpnENode_pnMPhaseIterGVN__v_; text: .text%__1cGIfNodeMdominated_by6MpnENode_pnMPhaseIterGVN__v_;
text: .text%__1cOcompiledVFrame2t6MpknFframe_pknLRegisterMap_pnKJavaThread_pnJScopeDesc__v_; text: .text%__1cOcompiledVFrame2t6MpknFframe_pknLRegisterMap_pnKJavaThread_pnJScopeDesc__v_;
text: .text%__1cJScopeDesc2t6MpknHnmethod_i_v_;
text: .text%__1cQshlI_reg_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cQshlI_reg_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cOGenerateOopMapJdo_astore6Mi_v_; text: .text%__1cOGenerateOopMapJdo_astore6Mi_v_;
text: .text%__1cbFunnecessary_membar_volatileNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cbFunnecessary_membar_volatileNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
@@ -2127,7 +2068,6 @@ text: .text%__1cLBoxLockNode2t6Mi_v_;
text: .text%__1cPconvF2D_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cPconvF2D_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cLOptoRuntimebBcomplete_monitor_enter_Type6F_pknITypeFunc__; text: .text%__1cLOptoRuntimebBcomplete_monitor_enter_Type6F_pknITypeFunc__;
text: .text%__1cIGraphKitLshared_lock6MpnENode__pnMFastLockNode__; text: .text%__1cIGraphKitLshared_lock6MpnENode__pnMFastLockNode__;
text: .text%__1cPcmpFastLockNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cNloadConP0NodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cNloadConP0NodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cRorI_reg_imm13NodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cRorI_reg_imm13NodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cKcmpOpUOperEless6kM_i_: ad_sparc_clone.o; text: .text%__1cKcmpOpUOperEless6kM_i_: ad_sparc_clone.o;
@@ -2135,7 +2075,6 @@ text: .text%__1cQaddF_reg_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cRLowMemoryDetectorWdetect_after_gc_memory6FpnKMemoryPool__v_; text: .text%__1cRLowMemoryDetectorWdetect_after_gc_memory6FpnKMemoryPool__v_;
text: .text%lwp_mutex_init: os_solaris.o; text: .text%lwp_mutex_init: os_solaris.o;
text: .text%__1cRsubI_zero_regNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cRsubI_zero_regNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cFframeLnmethods_do6M_v_;
text: .text%__1cQjava_lang_ThreadGthread6FpnHoopDesc__pnKJavaThread__; text: .text%__1cQjava_lang_ThreadGthread6FpnHoopDesc__pnKJavaThread__;
text: .text%__1cQnotemp_iRegIOperEtype6kM_pknEType__: ad_sparc.o; text: .text%__1cQnotemp_iRegIOperEtype6kM_pknEType__: ad_sparc.o;
text: .text%__1cITemplateIbytecode6kM_nJBytecodesECode__; text: .text%__1cITemplateIbytecode6kM_nJBytecodesECode__;
@@ -2178,7 +2117,6 @@ text: .text%__1cKManagementJtimestamp6F_x_;
text: .text%__1cIPSOldGenPupdate_counters6M_v_; text: .text%__1cIPSOldGenPupdate_counters6M_v_;
text: .text%__1cQshrI_reg_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cQshrI_reg_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cFForteNregister_stub6FpkcpC3_v_; text: .text%__1cFForteNregister_stub6FpkcpC3_v_;
text: .text%__1cFVTuneNregister_stub6FpkcpC3_v_;
text: .text%__1cNinstanceKlassbFlookup_method_in_all_interfaces6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__; text: .text%__1cNinstanceKlassbFlookup_method_in_all_interfaces6kMpnNsymbolOopDesc_2_pnNmethodOopDesc__;
text: .text%__1cTloadL_unalignedNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cTloadL_unalignedNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cJloadLNodeOmemory_operand6kM_pknIMachOper__; text: .text%__1cJloadLNodeOmemory_operand6kM_pknIMachOper__;
@@ -2249,11 +2187,9 @@ text: .text%__1cSmembar_acquireNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cYDebugInformationRecorderHcopy_to6MpnHnmethod__v_; text: .text%__1cYDebugInformationRecorderHcopy_to6MpnHnmethod__v_;
text: .text%__1cVExceptionHandlerTableHcopy_to6MpnHnmethod__v_; text: .text%__1cVExceptionHandlerTableHcopy_to6MpnHnmethod__v_;
text: .text%__1cJCodeCacheGcommit6FpnICodeBlob__v_; text: .text%__1cJCodeCacheGcommit6FpnICodeBlob__v_;
text: .text%__1cFVTuneOcreate_nmethod6FpnHnmethod__v_;
text: .text%__1cHnmethodQcopy_scopes_data6MpCi_v_; text: .text%__1cHnmethodQcopy_scopes_data6MpCi_v_;
text: .text%__1cFciEnvVnum_inlined_bytecodes6kM_i_; text: .text%__1cFciEnvVnum_inlined_bytecodes6kM_i_;
text: .text%__1cWImplicitExceptionTableHcopy_to6MpnHnmethod__v_; text: .text%__1cWImplicitExceptionTableHcopy_to6MpnHnmethod__v_;
text: .text%__1cLOopRecorderHcopy_to6MpnICodeBlob__v_;
text: .text%__1cIciMethodRbuild_method_data6M_v_; text: .text%__1cIciMethodRbuild_method_data6M_v_;
text: .text%__1cHCompileIOptimize6M_v_; text: .text%__1cHCompileIOptimize6M_v_;
text: .text%__1cHCompileLFinish_Warm6M_v_; text: .text%__1cHCompileLFinish_Warm6M_v_;
@@ -2365,7 +2301,6 @@ text: .text%__1cQmulF_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cQxorI_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cQxorI_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cWCallLeafNoFPDirectNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cWCallLeafNoFPDirectNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cLcmpD_ccNodeIpipeline6kM_pknIPipeline__; text: .text%__1cLcmpD_ccNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cWCallLeafNoFPDirectNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cJloadINodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cJloadINodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cbBopt_virtual_call_RelocationLstatic_stub6M_pC_; text: .text%__1cbBopt_virtual_call_RelocationLstatic_stub6M_pC_;
text: .text%__1cNTemplateTableDdef6FnJBytecodesECode_inITosState_3pFi_vi_v_; text: .text%__1cNTemplateTableDdef6FnJBytecodesECode_inITosState_3pFi_vi_v_;
@@ -2392,8 +2327,6 @@ text: .text%__1cSandI_reg_imm13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cIimmLOperJnum_edges6kM_I_: ad_sparc_clone.o; text: .text%__1cIimmLOperJnum_edges6kM_I_: ad_sparc_clone.o;
text: .text%__1cFParseOmerge_new_path6Mi_v_; text: .text%__1cFParseOmerge_new_path6Mi_v_;
text: .text%__1cQregP_to_stkPNodeLbottom_type6kM_pknEType__: ad_sparc_misc.o; text: .text%__1cQregP_to_stkPNodeLbottom_type6kM_pknEType__: ad_sparc_misc.o;
text: .text%__1cQjava_lang_StringGoffset6FpnHoopDesc__i_;
text: .text%__1cQjava_lang_StringFvalue6FpnHoopDesc__pnQtypeArrayOopDesc__;
text: .text%__1cQjava_lang_StringScreate_from_symbol6FnMsymbolHandle_pnGThread__nGHandle__; text: .text%__1cQjava_lang_StringScreate_from_symbol6FnMsymbolHandle_pnGThread__nGHandle__;
text: .text%__1cSmembar_releaseNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cSmembar_releaseNodeLout_RegMask6kM_rknHRegMask__;
text: .text%jni_NewByteArray: jni.o; text: .text%jni_NewByteArray: jni.o;
@@ -2402,7 +2335,6 @@ text: .text%__1cJJavaCallsMcall_special6FpnJJavaValue_nGHandle_nLKlassHandle_nMs
text: .text%__1cQSystemDictionarybAvalidate_protection_domain6FnTinstanceKlassHandle_nGHandle_2pnGThread__v_; text: .text%__1cQSystemDictionarybAvalidate_protection_domain6FnTinstanceKlassHandle_nGHandle_2pnGThread__v_;
text: .text%__1cKDictionaryVadd_protection_domain6MiInTinstanceKlassHandle_nGHandle_2pnGThread__v_; text: .text%__1cKDictionaryVadd_protection_domain6MiInTinstanceKlassHandle_nGHandle_2pnGThread__v_;
text: .text%__1cFParseLdo_newarray6MnJBasicType__v_; text: .text%__1cFParseLdo_newarray6MnJBasicType__v_;
text: .text%__1cPmethodDataKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cNmethodOopDescKklass_name6kM_pnNsymbolOopDesc__; text: .text%__1cNmethodOopDescKklass_name6kM_pnNsymbolOopDesc__;
text: .text%__1cSconvI2D_helperNodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cSconvI2D_helperNodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cLstoreP0NodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cLstoreP0NodeLout_RegMask6kM_rknHRegMask__;
@@ -2454,7 +2386,6 @@ text: .text%__1cJScopeDescGsender6kM_p0_;
text: .text%__1cSxorI_reg_imm13NodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cSxorI_reg_imm13NodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cOcompiledVFrameGsender6kM_pnGvframe__; text: .text%__1cOcompiledVFrameGsender6kM_pnGvframe__;
text: .text%__1cZInterpreterMacroAssemblerDpop6MnITosState__v_; text: .text%__1cZInterpreterMacroAssemblerDpop6MnITosState__v_;
text: .text%__1cGThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cQPlaceholderTableHoops_do6MpnKOopClosure__v_; text: .text%__1cQPlaceholderTableHoops_do6MpnKOopClosure__v_;
text: .text%__1cXJvmtiCurrentBreakpointsHoops_do6FpnKOopClosure__v_; text: .text%__1cXJvmtiCurrentBreakpointsHoops_do6FpnKOopClosure__v_;
text: .text%__1cNMemoryServiceHoops_do6FpnKOopClosure__v_; text: .text%__1cNMemoryServiceHoops_do6FpnKOopClosure__v_;
@@ -2462,7 +2393,6 @@ text: .text%__1cNThreadServiceHoops_do6FpnKOopClosure__v_;
text: .text%__1cKJNIHandlesHoops_do6FpnKOopClosure__v_; text: .text%__1cKJNIHandlesHoops_do6FpnKOopClosure__v_;
text: .text%__1cQSystemDictionaryRpreloaded_oops_do6FpnKOopClosure__v_; text: .text%__1cQSystemDictionaryRpreloaded_oops_do6FpnKOopClosure__v_;
text: .text%__1cLJvmtiExportHoops_do6FpnKOopClosure__v_; text: .text%__1cLJvmtiExportHoops_do6FpnKOopClosure__v_;
text: .text%__1cIVMThreadHoops_do6MpnKOopClosure__v_;
text: .text%__1cKJNIHandlesMweak_oops_do6FpnRBoolObjectClosure_pnKOopClosure__v_; text: .text%__1cKJNIHandlesMweak_oops_do6FpnRBoolObjectClosure_pnKOopClosure__v_;
text: .text%__1cSObjectSynchronizerHoops_do6FpnKOopClosure__v_; text: .text%__1cSObjectSynchronizerHoops_do6FpnKOopClosure__v_;
text: .text%__1cMFlatProfilerHoops_do6FpnKOopClosure__v_; text: .text%__1cMFlatProfilerHoops_do6FpnKOopClosure__v_;
@@ -2510,8 +2440,6 @@ text: .text%__1cMTypeKlassPtrFxmeet6kMpknEType__3_;
text: .text%__1cKPSYoungGenPupdate_counters6M_v_; text: .text%__1cKPSYoungGenPupdate_counters6M_v_;
text: .text%__1cWThreadLocalAllocBufferbFaccumulate_statistics_before_gc6F_v_; text: .text%__1cWThreadLocalAllocBufferbFaccumulate_statistics_before_gc6F_v_;
text: .text%__1cWThreadLocalAllocBufferQresize_all_tlabs6F_v_; text: .text%__1cWThreadLocalAllocBufferQresize_all_tlabs6F_v_;
text: .text%__1cPGCMemoryManagerIgc_begin6M_v_;
text: .text%__1cPGCMemoryManagerGgc_end6M_v_;
text: .text%__1cRLowMemoryDetectorRdetect_low_memory6F_v_; text: .text%__1cRLowMemoryDetectorRdetect_low_memory6F_v_;
text: .text%__1cNMemoryServiceStrack_memory_usage6F_v_; text: .text%__1cNMemoryServiceStrack_memory_usage6F_v_;
text: .text%__1cbAPSGCAdaptivePolicyCountersPupdate_counters6M_v_; text: .text%__1cbAPSGCAdaptivePolicyCountersPupdate_counters6M_v_;
@@ -2527,7 +2455,6 @@ text: .text%__1cORuntimeServicebDrecord_safepoint_synchronized6F_v_;
text: .text%__1cQaddF_reg_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cQaddF_reg_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cUSafepointSynchronizeFbegin6F_v_; text: .text%__1cUSafepointSynchronizeFbegin6F_v_;
text: .text%__1cKarrayKlassTallocate_arrayArray6MiipnGThread__pnPobjArrayOopDesc__; text: .text%__1cKarrayKlassTallocate_arrayArray6MiipnGThread__pnPobjArrayOopDesc__;
text: .text%__1cONMethodSweeperFsweep6F_v_;
text: .text%__1cCosbAmake_polling_page_readable6F_v_; text: .text%__1cCosbAmake_polling_page_readable6F_v_;
text: .text%__1cUSafepointSynchronizeDend6F_v_; text: .text%__1cUSafepointSynchronizeDend6F_v_;
text: .text%__1cOcmovII_immNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cOcmovII_immNodeErule6kM_I_: ad_sparc_misc.o;
@@ -2539,7 +2466,6 @@ text: .text%JVM_GetCallerClass;
text: .text%__1cNSignatureInfoHdo_byte6M_v_: bytecode.o; text: .text%__1cNSignatureInfoHdo_byte6M_v_: bytecode.o;
text: .text%__1cOcmovPP_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cOcmovPP_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cKstoreBNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cKstoreBNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cSobjArrayKlassKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cLstoreC0NodePoper_input_base6kM_I_: ad_sparc_misc.o; text: .text%__1cLstoreC0NodePoper_input_base6kM_I_: ad_sparc_misc.o;
text: .text%__1cTloadL_unalignedNodePoper_input_base6kM_I_: ad_sparc_misc.o; text: .text%__1cTloadL_unalignedNodePoper_input_base6kM_I_: ad_sparc_misc.o;
text: .text%__1cICmpFNodeGOpcode6kM_i_; text: .text%__1cICmpFNodeGOpcode6kM_i_;
@@ -2551,7 +2477,6 @@ text: .text%jni_IsAssignableFrom: jni.o;
text: .text%jni_GetFieldID: jni.o; text: .text%jni_GetFieldID: jni.o;
text: .text%__1cJLoadPNodeMstore_Opcode6kM_i_: classes.o; text: .text%__1cJLoadPNodeMstore_Opcode6kM_i_: classes.o;
text: .text%__1cLstoreB0NodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cLstoreB0NodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cZInterpreterMacroAssemblerbAget_cache_and_index_at_bcp6MpnMRegisterImpl_2i_v_;
text: .text%__1cHTypeAryFxdual6kM_pknEType__; text: .text%__1cHTypeAryFxdual6kM_pknEType__;
text: .text%__1cMtlsLoadPNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cMtlsLoadPNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cIVMThreadHexecute6FpnMVM_Operation__v_; text: .text%__1cIVMThreadHexecute6FpnMVM_Operation__v_;
@@ -2626,9 +2551,7 @@ text: .text%__1cJloadSNodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cRloadConP_pollNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cRloadConP_pollNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cNObjectMonitorHRecycle6M_v_; text: .text%__1cNObjectMonitorHRecycle6M_v_;
text: .text%__1cNSharedRuntimeSfind_callee_method6FpnKJavaThread_pnGThread__nMmethodHandle__; text: .text%__1cNSharedRuntimeSfind_callee_method6FpnKJavaThread_pnGThread__nMmethodHandle__;
text: .text%__1cMloadConLNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cJloadDNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cJloadDNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cQSystemDictionaryTresolve_from_stream6FnMsymbolHandle_nGHandle_2pnPClassFileStream_pnGThread__pnMklassOopDesc__;
text: .text%__1cQstkI_to_regFNodeIpipeline6kM_pknIPipeline__; text: .text%__1cQstkI_to_regFNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cQregP_to_stkPNodeIpipeline6kM_pknIPipeline__; text: .text%__1cQregP_to_stkPNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cZInterpreterMacroAssemblerFpop_i6MpnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerFpop_i6MpnMRegisterImpl__v_;
@@ -2636,7 +2559,6 @@ text: .text%__1cIMaxINodeGadd_id6kM_pknEType__: classes.o;
text: .text%__1cNSharedRuntimeTreresolve_call_site6FpnKJavaThread_pnGThread__nMmethodHandle__; text: .text%__1cNSharedRuntimeTreresolve_call_site6FpnKJavaThread_pnGThread__nMmethodHandle__;
text: .text%__1cYcompareAndSwapL_boolNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cYcompareAndSwapL_boolNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cNSCMemProjNodeJideal_reg6kM_I_: classes.o; text: .text%__1cNSCMemProjNodeJideal_reg6kM_I_: classes.o;
text: .text%__1cYcompareAndSwapL_boolNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cIProjNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cIProjNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cIPSOldGenMmax_gen_size6M_I_: psOldGen.o; text: .text%__1cIPSOldGenMmax_gen_size6M_I_: psOldGen.o;
text: .text%__1cKExceptionsK_throw_msg6FpnGThread_pkcipnNsymbolOopDesc_4_v_; text: .text%__1cKExceptionsK_throw_msg6FpnGThread_pkcipnNsymbolOopDesc_4_v_;
@@ -2855,7 +2777,6 @@ text: .text%__1cOstackSlotPOperEtype6kM_pknEType__: ad_sparc.o;
text: .text%jni_GetMethodID: jni.o; text: .text%jni_GetMethodID: jni.o;
text: .text%__1cQshlL_reg_regNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cQshlL_reg_regNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cIMulINodeJideal_reg6kM_I_: classes.o; text: .text%__1cIMulINodeJideal_reg6kM_I_: classes.o;
text: .text%__1cNminI_eRegNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cRshlI_reg_imm5NodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cRshlI_reg_imm5NodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cOloadConL13NodeIpipeline6kM_pknIPipeline__; text: .text%__1cOloadConL13NodeIpipeline6kM_pknIPipeline__;
text: .text%__1cNObjectMonitorGnotify6MpnGThread__v_; text: .text%__1cNObjectMonitorGnotify6MpnGThread__v_;
@@ -2877,7 +2798,6 @@ text: .text%__1cQcmovI_reg_ltNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cNloadConL0NodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cNloadConL0NodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cKo1RegPOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cKo1RegPOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cSsubL_reg_reg_1NodeIpipeline6kM_pknIPipeline__; text: .text%__1cSsubL_reg_reg_1NodeIpipeline6kM_pknIPipeline__;
text: .text%__1cIBytecodeIset_code6MnJBytecodesECode__v_;
text: .text%__1cQshrL_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cQshrL_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cRsarL_reg_imm6NodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cRsarL_reg_imm6NodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cJloadFNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cJloadFNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
@@ -2948,7 +2868,6 @@ text: .text%__1cFStateM_sub_Op_SubL6MpknENode__v_;
text: .text%__1cKCompiledICMstub_address6kM_pC_; text: .text%__1cKCompiledICMstub_address6kM_pC_;
text: .text%__1cJvmSymbolsOsignature_type6FpnNsymbolOopDesc__nJBasicType__; text: .text%__1cJvmSymbolsOsignature_type6FpnNsymbolOopDesc__nJBasicType__;
text: .text%__1cQsubL_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cQsubL_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cQmodI_reg_regNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cISubDNodeGOpcode6kM_i_; text: .text%__1cISubDNodeGOpcode6kM_i_;
text: .text%__1cQmodI_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cQmodI_reg_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cPfieldDescriptorLannotations6kM_pnQtypeArrayOopDesc__; text: .text%__1cPfieldDescriptorLannotations6kM_pnQtypeArrayOopDesc__;
@@ -2986,7 +2905,6 @@ text: .text%__1cRtestI_reg_immNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cJMemRegionMintersection6kMk0_0_; text: .text%__1cJMemRegionMintersection6kMk0_0_;
text: .text%__1cKJavaThread2t6MpFp0pnGThread__vI_v_; text: .text%__1cKJavaThread2t6MpFp0pnGThread__vI_v_;
text: .text%__1cKJavaThreadDrun6M_v_; text: .text%__1cKJavaThreadDrun6M_v_;
text: .text%__1cNSafepointBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cPjava_lang_ClassOprimitive_type6FpnHoopDesc__nJBasicType__; text: .text%__1cPjava_lang_ClassOprimitive_type6FpnHoopDesc__nJBasicType__;
text: .text%JVM_IsArrayClass; text: .text%JVM_IsArrayClass;
text: .text%jni_CallStaticVoidMethod: jni.o; text: .text%jni_CallStaticVoidMethod: jni.o;
@@ -3017,14 +2935,12 @@ text: .text%__1cXNativeSignatureIteratorGdo_int6M_v_: interpreterRT_sparc.o;
text: .text%__1cINodeHashEgrow6M_v_; text: .text%__1cINodeHashEgrow6M_v_;
text: .text%__1cOGenerateOopMapPdo_monitorenter6Mi_v_; text: .text%__1cOGenerateOopMapPdo_monitorenter6Mi_v_;
text: .text%__1cOcmovPP_regNodeLbottom_type6kM_pknEType__: ad_sparc_misc.o; text: .text%__1cOcmovPP_regNodeLbottom_type6kM_pknEType__: ad_sparc_misc.o;
text: .text%__1cMloadConDNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cIMaxINodeIadd_ring6kMpknEType_3_3_; text: .text%__1cIMaxINodeIadd_ring6kMpknEType_3_3_;
text: .text%__1cJloadSNodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cJloadSNodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cOGenerateOopMapLcompute_map6MpnGThread__v_; text: .text%__1cOGenerateOopMapLcompute_map6MpnGThread__v_;
text: .text%__1cLConvF2DNodeLbottom_type6kM_pknEType__: classes.o; text: .text%__1cLConvF2DNodeLbottom_type6kM_pknEType__: classes.o;
text: .text%JVM_Open; text: .text%JVM_Open;
text: .text%__1cRInvocationCounterFreset6M_v_; text: .text%__1cRInvocationCounterFreset6M_v_;
text: .text%__1cRCompilationPolicybIreset_counter_for_invocation_event6MnMmethodHandle__v_;
text: .text%__1cOGenerateOopMap2t6MnMmethodHandle__v_; text: .text%__1cOGenerateOopMap2t6MnMmethodHandle__v_;
text: .text%__1cOGenerateOopMapRdo_interpretation6M_v_; text: .text%__1cOGenerateOopMapRdo_interpretation6M_v_;
text: .text%__1cIRetTableRcompute_ret_table6MnMmethodHandle__v_; text: .text%__1cIRetTableRcompute_ret_table6MnMmethodHandle__v_;
@@ -3144,7 +3060,6 @@ text: .text%__1cLstoreF0NodeIpipeline6kM_pknIPipeline__;
text: .text%__1cIMinINodeIadd_ring6kMpknEType_3_3_; text: .text%__1cIMinINodeIadd_ring6kMpknEType_3_3_;
text: .text%JVM_GetInheritedAccessControlContext; text: .text%JVM_GetInheritedAccessControlContext;
text: .text%__1cPPerfDataManagerWcreate_string_constant6FnJCounterNS_pkc3pnGThread__pnSPerfStringConstant__; text: .text%__1cPPerfDataManagerWcreate_string_constant6FnJCounterNS_pkc3pnGThread__pnSPerfStringConstant__;
text: .text%__1cNmaxI_eRegNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%JVM_NativePath; text: .text%JVM_NativePath;
text: .text%__1cOMacroAssemblerNflush_windows6M_v_; text: .text%__1cOMacroAssemblerNflush_windows6M_v_;
text: .text%__1cSsubD_regD_regDNodeIpipeline6kM_pknIPipeline__; text: .text%__1cSsubD_regD_regDNodeIpipeline6kM_pknIPipeline__;
@@ -3157,13 +3072,11 @@ text: .text%__1cVinline_cache_regPOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cKstorePNodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cKstorePNodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cQObjectStartArrayFreset6M_v_; text: .text%__1cQObjectStartArrayFreset6M_v_;
text: .text%__1cPconvI2D_memNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cPconvI2D_memNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cHThreadsHoops_do6FpnKOopClosure__v_;
text: .text%__1cQaddD_reg_regNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cQaddD_reg_regNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cLConvF2INodeGOpcode6kM_i_; text: .text%__1cLConvF2INodeGOpcode6kM_i_;
text: .text%__1cVCallRuntimeDirectNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cVCallRuntimeDirectNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cJHashtableGunlink6MpnRBoolObjectClosure__v_; text: .text%__1cJHashtableGunlink6MpnRBoolObjectClosure__v_;
text: .text%__1cIPSOldGenPadjust_pointers6M_v_; text: .text%__1cIPSOldGenPadjust_pointers6M_v_;
text: .text%__1cVCallRuntimeDirectNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cOcmovPI_regNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cOcmovPI_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cIPSOldGenHcompact6M_v_; text: .text%__1cIPSOldGenHcompact6M_v_;
text: .text%__1cMtlsLoadPNodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cMtlsLoadPNodeEsize6kMpnNPhaseRegAlloc__I_;
@@ -3177,7 +3090,6 @@ text: .text%__1cOcmovLL_regNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%jni_GetStaticMethodID: jni.o; text: .text%jni_GetStaticMethodID: jni.o;
text: .text%__1cZInterpreterMacroAssemblerUupdate_mdp_by_offset6MipnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerUupdate_mdp_by_offset6MipnMRegisterImpl__v_;
text: .text%__1cRtestI_reg_immNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cRtestI_reg_immNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cHnmethodbAmake_not_entrant_or_zombie6Mi_v_;
text: .text%__1cPconvF2D_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cPconvF2D_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cOPhaseIdealLoopKdo_peeling6MpnNIdealLoopTree_rnJNode_List__v_; text: .text%__1cOPhaseIdealLoopKdo_peeling6MpnNIdealLoopTree_rnJNode_List__v_;
text: .text%__1cOcmovLL_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cOcmovLL_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
@@ -3290,7 +3202,6 @@ text: .text%__1cINegDNodeLbottom_type6kM_pknEType__: classes.o;
text: .text%__1cLConvI2FNodeFValue6kMpnOPhaseTransform__pknEType__; text: .text%__1cLConvI2FNodeFValue6kMpnOPhaseTransform__pknEType__;
text: .text%__1cOcmovLL_regNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cOcmovLL_regNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cRorI_reg_imm13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cRorI_reg_imm13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cTloadL_unalignedNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cTloadL_unalignedNodeOmemory_operand6kM_pknIMachOper__; text: .text%__1cTloadL_unalignedNodeOmemory_operand6kM_pknIMachOper__;
text: .text%__1cKloadUBNodeOmemory_operand6kM_pknIMachOper__; text: .text%__1cKloadUBNodeOmemory_operand6kM_pknIMachOper__;
text: .text%__1cXconvI2D_regDHi_regDNodeIpipeline6kM_pknIPipeline__; text: .text%__1cXconvI2D_regDHi_regDNodeIpipeline6kM_pknIPipeline__;
@@ -3371,7 +3282,6 @@ text: .text%__1cYjava_lang_reflect_MethodEslot6FpnHoopDesc__i_;
text: .text%__1cYjava_lang_reflect_MethodFclazz6FpnHoopDesc__2_; text: .text%__1cYjava_lang_reflect_MethodFclazz6FpnHoopDesc__2_;
text: .text%__1cYinternal_word_RelocationGtarget6M_pC_; text: .text%__1cYinternal_word_RelocationGtarget6M_pC_;
text: .text%__1cJStubQdDueueKremove_all6M_v_; text: .text%__1cJStubQdDueueKremove_all6M_v_;
text: .text%__1cMloadConFNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cPconvI2D_memNodeIpipeline6kM_pknIPipeline__; text: .text%__1cPconvI2D_memNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cPorL_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o; text: .text%__1cPorL_reg_regNodeMideal_Opcode6kM_i_: ad_sparc_misc.o;
text: .text%__1cZInterpreterMacroAssemblerLindex_check6MpnMRegisterImpl_2i22_v_; text: .text%__1cZInterpreterMacroAssemblerLindex_check6MpnMRegisterImpl_2i22_v_;
@@ -3405,12 +3315,10 @@ text: .text%__1cSInterpreterRuntimeQcreate_exception6FpnKJavaThread_pc3_v_;
text: .text%__1cQComputeCallStackIdo_array6Mii_v_: generateOopMap.o; text: .text%__1cQComputeCallStackIdo_array6Mii_v_: generateOopMap.o;
text: .text%__1cKPSYoungGenKprecompact6M_v_; text: .text%__1cKPSYoungGenKprecompact6M_v_;
text: .text%__1cXjava_lang_reflect_FieldEslot6FpnHoopDesc__i_; text: .text%__1cXjava_lang_reflect_FieldEslot6FpnHoopDesc__i_;
text: .text%__1cSconvD2I_helperNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cMnegF_regNodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cMnegF_regNodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cHThreadsLgc_prologue6F_v_; text: .text%__1cHThreadsLgc_prologue6F_v_;
text: .text%__1cHThreadsLgc_epilogue6F_v_; text: .text%__1cHThreadsLgc_epilogue6F_v_;
text: .text%__1cPconvI2L_regNodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cPconvI2L_regNodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cPconvD2I_regNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cJJavaCallsLcall_static6FpnJJavaValue_nLKlassHandle_nMsymbolHandle_4nGHandle_pnGThread__v_; text: .text%__1cJJavaCallsLcall_static6FpnJJavaValue_nLKlassHandle_nMsymbolHandle_4nGHandle_pnGThread__v_;
text: .text%__1cUParallelScavengeHeapHcollect6MnHGCCauseFCause__v_; text: .text%__1cUParallelScavengeHeapHcollect6MnHGCCauseFCause__v_;
text: .text%__1cRCardTableModRefBSFclear6MnJMemRegion__v_; text: .text%__1cRCardTableModRefBSFclear6MnJMemRegion__v_;
@@ -3449,10 +3357,6 @@ text: .text%__1cKPSYoungGenHcompact6M_v_;
text: .text%JVM_GetSystemPackage; text: .text%JVM_GetSystemPackage;
text: .text%__1cPfieldDescriptorTfloat_initial_value6kM_f_; text: .text%__1cPfieldDescriptorTfloat_initial_value6kM_f_;
text: .text%__1cKPSYoungGenPadjust_pointers6M_v_; text: .text%__1cKPSYoungGenPadjust_pointers6M_v_;
text: .text%__1cQUncommonTrapBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cSDeoptimizationBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cNExceptionBlobHoops_do6MpnKOopClosure__v_: codeBlob.o;
text: .text%__1cJCodeCacheHoops_do6FpnKOopClosure__v_;
text: .text%__1cJCodeCacheLgc_prologue6F_v_; text: .text%__1cJCodeCacheLgc_prologue6F_v_;
text: .text%__1cJCodeCacheLgc_epilogue6F_v_; text: .text%__1cJCodeCacheLgc_epilogue6F_v_;
text: .text%__1cIXorINodeIadd_ring6kMpknEType_3_3_; text: .text%__1cIXorINodeIadd_ring6kMpknEType_3_3_;
@ -3508,16 +3412,13 @@ text: .text%__1cSstring_compareNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%jni_GetEnv; text: .text%jni_GetEnv;
text: .text%__1cJloadDNodeOmemory_operand6kM_pknIMachOper__; text: .text%__1cJloadDNodeOmemory_operand6kM_pknIMachOper__;
text: .text%__1cQstkI_to_regINodeLout_RegMask6kM_rknHRegMask__; text: .text%__1cQstkI_to_regINodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cSstring_compareNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cXNativeSignatureIteratorHdo_bool6M_v_: interpreterRT_sparc.o; text: .text%__1cXNativeSignatureIteratorHdo_bool6M_v_: interpreterRT_sparc.o;
text: .text%Unsafe_GetNativeByte; text: .text%Unsafe_GetNativeByte;
text: .text%JVM_NanoTime; text: .text%JVM_NanoTime;
text: .text%__1cCosNjavaTimeNanos6F_x_; text: .text%__1cCosNjavaTimeNanos6F_x_;
text: .text%__1cOMacroAssemblerOrestore_thread6MkpnMRegisterImpl__v_; text: .text%__1cOMacroAssemblerOrestore_thread6MkpnMRegisterImpl__v_;
text: .text%__1cVcompiledICHolderKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cQandL_reg_regNodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cQandL_reg_regNodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cIimmFOperJnum_edges6kM_I_: ad_sparc_clone.o; text: .text%__1cIimmFOperJnum_edges6kM_I_: ad_sparc_clone.o;
text: .text%__1cHThreadsLnmethods_do6F_v_;
text: .text%__1cKcmpOpFOperGnegate6M_v_: ad_sparc_clone.o; text: .text%__1cKcmpOpFOperGnegate6M_v_: ad_sparc_clone.o;
text: .text%__1cICodeBlobFflush6M_v_; text: .text%__1cICodeBlobFflush6M_v_;
text: .text%__1cFParseMdo_anewarray6M_v_; text: .text%__1cFParseMdo_anewarray6M_v_;
@ -3537,8 +3438,6 @@ text: .text%__1cHnmethodSflush_dependencies6MpnRBoolObjectClosure__v_;
text: .text%__1cKo2RegPOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cKo2RegPOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cQregI_to_stkINodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cQregI_to_stkINodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cbCAbstractInterpreterGeneratorVgenerate_method_entry6MnTAbstractInterpreterKMethodKind__pC_; text: .text%__1cbCAbstractInterpreterGeneratorVgenerate_method_entry6MnTAbstractInterpreterKMethodKind__pC_;
text: .text%__1cParrayKlassKlassRoop_copy_contents6MpnSPSPromotionManager_pnHoopDesc__v_;
text: .text%__1cFVTuneOdelete_nmethod6FpnHnmethod__v_;
text: .text%__1cWloadConI_x43300000NodeIpipeline6kM_pknIPipeline__; text: .text%__1cWloadConI_x43300000NodeIpipeline6kM_pknIPipeline__;
text: .text%__1cFParseQdo_monitor_enter6M_v_; text: .text%__1cFParseQdo_monitor_enter6M_v_;
text: .text%__1cPorL_reg_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cPorL_reg_regNodeIpipeline6kM_pknIPipeline__;
@ -3547,13 +3446,11 @@ text: .text%JVM_FindPrimitiveClass;
text: .text%__1cVMoveL2D_stack_regNodeIpipeline6kM_pknIPipeline__; text: .text%__1cVMoveL2D_stack_regNodeIpipeline6kM_pknIPipeline__;
text: .text%__1cNTemplateTableEiop26Fn0AJOperation__v_; text: .text%__1cNTemplateTableEiop26Fn0AJOperation__v_;
text: .text%__1cZInterpreterMacroAssemblerMdispatch_via6MnITosState_ppC_v_; text: .text%__1cZInterpreterMacroAssemblerMdispatch_via6MnITosState_ppC_v_;
text: .text%__1cSmodL_reg_imm13NodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cRshrI_reg_imm5NodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cRshrI_reg_imm5NodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cJJavaCallsLcall_static6FpnJJavaValue_nLKlassHandle_nMsymbolHandle_4pnGThread__v_; text: .text%__1cJJavaCallsLcall_static6FpnJJavaValue_nLKlassHandle_nMsymbolHandle_4pnGThread__v_;
text: .text%__1cSsubL_reg_reg_2NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cSsubL_reg_reg_2NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cUmulL_reg_imm13_1NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cUmulL_reg_imm13_1NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cIDivDNodeFValue6kMpnOPhaseTransform__pknEType__; text: .text%__1cIDivDNodeFValue6kMpnOPhaseTransform__pknEType__;
text: .text%__1cPconvI2F_regNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cNinstanceKlassUfind_interface_field6kMpnNsymbolOopDesc_2pnPfieldDescriptor__pnMklassOopDesc__; text: .text%__1cNinstanceKlassUfind_interface_field6kMpnNsymbolOopDesc_2pnPfieldDescriptor__pnMklassOopDesc__;
text: .text%__1cOstackSlotFOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cOstackSlotFOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cUdivL_reg_imm13_1NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cUdivL_reg_imm13_1NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
@ -3561,7 +3458,6 @@ text: .text%__1cRSignatureIteratorHiterate6M_v_;
text: .text%__1cOcmovLL_regNodeHtwo_adr6kM_I_: ad_sparc_misc.o; text: .text%__1cOcmovLL_regNodeHtwo_adr6kM_I_: ad_sparc_misc.o;
text: .text%__1cJname2type6Fpkc_nJBasicType__; text: .text%__1cJname2type6Fpkc_nJBasicType__;
text: .text%__1cSmulL_reg_imm13NodeIpipeline6kM_pknIPipeline__; text: .text%__1cSmulL_reg_imm13NodeIpipeline6kM_pknIPipeline__;
text: .text%__1cPBytecode_invokeLresult_type6kMpnGThread__nJBasicType__;
text: .text%__1cOloadConL13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cOloadConL13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cKcmpOpFOperHgreater6kM_i_: ad_sparc_clone.o; text: .text%__1cKcmpOpFOperHgreater6kM_i_: ad_sparc_clone.o;
text: .text%__1cIDivDNodeJideal_reg6kM_I_: classes.o; text: .text%__1cIDivDNodeJideal_reg6kM_I_: classes.o;
@ -3589,7 +3485,6 @@ text: .text%__1cNTemplateTableQfast_accessfield6FnITosState__v_;
text: .text%__1cKCompiledICSset_to_megamorphic6MpnICallInfo_nJBytecodesECode_pnGThread__v_; text: .text%__1cKCompiledICSset_to_megamorphic6MpnICallInfo_nJBytecodesECode_pnGThread__v_;
text: .text%Unsafe_StaticFieldOffset; text: .text%Unsafe_StaticFieldOffset;
text: .text%__1cQmulI_reg_regNodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cQmulI_reg_regNodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cNTemplateTableXresolve_cache_and_index6FipnMRegisterImpl_2_v_;
text: .text%__1cQaddI_reg_regNodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cQaddI_reg_regNodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cOcmovLI_regNodeHtwo_adr6kM_I_: ad_sparc_misc.o; text: .text%__1cOcmovLI_regNodeHtwo_adr6kM_I_: ad_sparc_misc.o;
text: .text%JVM_GetClassContext; text: .text%JVM_GetClassContext;
@ -3725,7 +3620,6 @@ text: .text%__1cQmulD_reg_regNodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%Unsafe_AllocateMemory; text: .text%Unsafe_AllocateMemory;
text: .text%__1cSandL_reg_imm13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cSandL_reg_imm13NodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%JVM_GetLastErrorString; text: .text%JVM_GetLastErrorString;
text: .text%__1cQmodL_reg_regNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cNTemplateTableElop26Fn0AJOperation__v_; text: .text%__1cNTemplateTableElop26Fn0AJOperation__v_;
text: .text%__1cQjava_lang_ThreadKset_daemon6FpnHoopDesc__v_; text: .text%__1cQjava_lang_ThreadKset_daemon6FpnHoopDesc__v_;
text: .text%__1cNTemplateTableEfop26Fn0AJOperation__v_; text: .text%__1cNTemplateTableEfop26Fn0AJOperation__v_;
@ -3738,7 +3632,6 @@ text: .text%__1cNTemplateTableGlstore6Fi_v_;
text: .text%__1cLConvF2INodeFValue6kMpnOPhaseTransform__pknEType__; text: .text%__1cLConvF2INodeFValue6kMpnOPhaseTransform__pknEType__;
text: .text%__1cIciMethod2t6MpnPciInstanceKlass_pnIciSymbol_4_v_; text: .text%__1cIciMethod2t6MpnPciInstanceKlass_pnIciSymbol_4_v_;
text: .text%__1cRcompL_reg_regNodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cRcompL_reg_regNodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cLconvI2BNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cLConvD2FNodeFValue6kMpnOPhaseTransform__pknEType__; text: .text%__1cLConvD2FNodeFValue6kMpnOPhaseTransform__pknEType__;
text: .text%__1cSconvD2I_helperNodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cSconvD2I_helperNodeHsize_of6kM_I_: ad_sparc_misc.o;
text: .text%__1cRsubI_zero_regNodeHsize_of6kM_I_: ad_sparc_misc.o; text: .text%__1cRsubI_zero_regNodeHsize_of6kM_I_: ad_sparc_misc.o;
@ -3775,7 +3668,6 @@ text: .text%__1cOMacroAssemblerMcall_VM_leaf6MpnMRegisterImpl_pC22_v_;
text: .text%__1cOMacroAssemblerMcall_VM_leaf6MpnMRegisterImpl_pC2_v_; text: .text%__1cOMacroAssemblerMcall_VM_leaf6MpnMRegisterImpl_pC2_v_;
text: .text%__1cTjava_lang_ThrowableLset_message6FpnHoopDesc_2_v_; text: .text%__1cTjava_lang_ThrowableLset_message6FpnHoopDesc_2_v_;
text: .text%__1cOGenerateOopMapTret_jump_targets_do6MpnOBytecodeStream_pFp0ipi_vi4_v_; text: .text%__1cOGenerateOopMapTret_jump_targets_do6MpnOBytecodeStream_pFp0ipi_vi4_v_;
text: .text%__1cPconvI2D_regNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%Unsafe_SetMemory; text: .text%Unsafe_SetMemory;
text: .text%__1cKstfSSFNodeErule6kM_I_: ad_sparc_misc.o; text: .text%__1cKstfSSFNodeErule6kM_I_: ad_sparc_misc.o;
text: .text%__1cZInterpreterMacroAssemblerOthrow_if_not_x6MnJAssemblerJCondition_pCpnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerOthrow_if_not_x6MnJAssemblerJCondition_pCpnMRegisterImpl__v_;
@ -3798,7 +3690,6 @@ text: .text%__1cRMachSpillCopyNodeHsize_of6kM_I_: ad_sparc.o;
text: .text%__1cQCompilerCounters2t6MpkcipnGThread__v_; text: .text%__1cQCompilerCounters2t6MpkcipnGThread__v_;
text: .text%__1cOGenerateOopMapRdo_multianewarray6Mii_v_; text: .text%__1cOGenerateOopMapRdo_multianewarray6Mii_v_;
text: .text%__1cNCompileBrokerUcompiler_thread_loop6F_v_; text: .text%__1cNCompileBrokerUcompiler_thread_loop6F_v_;
text: .text%__1cbFpartialSubtypeCheck_vs_zeroNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%jni_CallStaticObjectMethodV: jni.o; text: .text%jni_CallStaticObjectMethodV: jni.o;
text: .text%__1cNTemplateTableMfast_xaccess6FnITosState__v_; text: .text%__1cNTemplateTableMfast_xaccess6FnITosState__v_;
text: .text%__1cJMemRegionFminus6kMk0_0_; text: .text%__1cJMemRegionFminus6kMk0_0_;
@ -3857,13 +3748,10 @@ text: .text%__1cLstoreF0NodeLout_RegMask6kM_rknHRegMask__;
text: .text%__1cZInterpreterMacroAssemblerWprofile_switch_default6MpnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerWprofile_switch_default6MpnMRegisterImpl__v_;
text: .text%__1cTAbstract_VM_VersionOvm_info_string6F_pkc_; text: .text%__1cTAbstract_VM_VersionOvm_info_string6F_pkc_;
text: .text%__1cJStubQdDueue2t6MpnNStubInterface_ipnFMutex_pkc_v_; text: .text%__1cJStubQdDueue2t6MpnNStubInterface_ipnFMutex_pkc_v_;
text: .text%__1cSconvF2I_helperNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cHThreadsbFdeoptimized_wrt_marked_nmethods6F_v_; text: .text%__1cHThreadsbFdeoptimized_wrt_marked_nmethods6F_v_;
text: .text%__1cbAconvL2D_reg_slow_fxtofNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cOstackSlotFOperEdisp6kMpnNPhaseRegAlloc_pknENode_i_i_: ad_sparc.o; text: .text%__1cOstackSlotFOperEdisp6kMpnNPhaseRegAlloc_pknENode_i_i_: ad_sparc.o;
text: .text%__1cOstackSlotFOperEbase6kMpnNPhaseRegAlloc_pknENode_i_i_: ad_sparc.o; text: .text%__1cOstackSlotFOperEbase6kMpnNPhaseRegAlloc_pknENode_i_i_: ad_sparc.o;
text: .text%__1cOstackSlotFOperFindex6kMpnNPhaseRegAlloc_pknENode_i_i_: ad_sparc.o; text: .text%__1cOstackSlotFOperFindex6kMpnNPhaseRegAlloc_pknENode_i_i_: ad_sparc.o;
text: .text%__1cPconvF2I_regNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cNTemplateTableGlconst6Fi_v_; text: .text%__1cNTemplateTableGlconst6Fi_v_;
text: .text%__1cLstoreC0NodeEsize6kMpnNPhaseRegAlloc__I_; text: .text%__1cLstoreC0NodeEsize6kMpnNPhaseRegAlloc__I_;
text: .text%__1cMPeriodicTaskGenroll6M_v_; text: .text%__1cMPeriodicTaskGenroll6M_v_;
@ -3913,7 +3801,6 @@ text: .text%__1cLMoveF2INodeFValue6kMpnOPhaseTransform__pknEType__;
text: .text%__1cLOptoRuntimeIl2f_Type6F_pknITypeFunc__; text: .text%__1cLOptoRuntimeIl2f_Type6F_pknITypeFunc__;
text: .text%__1cOMacroAssemblerUcalc_mem_param_words6MpnMRegisterImpl_2_v_; text: .text%__1cOMacroAssemblerUcalc_mem_param_words6MpnMRegisterImpl_2_v_;
text: .text%__1cZInterpreterMacroAssemblerLprofile_ret6MnITosState_pnMRegisterImpl_3_v_; text: .text%__1cZInterpreterMacroAssemblerLprofile_ret6MnITosState_pnMRegisterImpl_3_v_;
text: .text%__1cZInterpreterMacroAssemblerUprofile_virtual_call6MpnMRegisterImpl_2_v_;
text: .text%__1cZInterpreterMacroAssemblerMprofile_call6MpnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerMprofile_call6MpnMRegisterImpl__v_;
text: .text%__1cLklassVtableQindex_of_miranda6MpnNsymbolOopDesc_2_i_; text: .text%__1cLklassVtableQindex_of_miranda6MpnNsymbolOopDesc_2_i_;
text: .text%__1cZInterpreterMacroAssemblerSupdate_mdp_for_ret6MnITosState_pnMRegisterImpl__v_; text: .text%__1cZInterpreterMacroAssemblerSupdate_mdp_for_ret6MnITosState_pnMRegisterImpl__v_;
@ -4001,16 +3888,13 @@ text: .text%__1cSThreadLocalStorageHpd_init6F_v_;
text: .text%__1cVMoveF2I_stack_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cVMoveF2I_stack_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cVMoveL2D_stack_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_; text: .text%__1cVMoveL2D_stack_regNodeEemit6kMrnKCodeBuffer_pnNPhaseRegAlloc__v_;
text: .text%__1cWinvocationCounter_init6F_v_; text: .text%__1cWinvocationCounter_init6F_v_;
text: .text%__1cKTypeOopPtrEmake6FnHTypePtrDPTR_i_pk0_;
text: .text%__1cKTypeOopPtrFxdual6kM_pknEType__; text: .text%__1cKTypeOopPtrFxdual6kM_pknEType__;
text: .text%__1cFParseMjump_if_join6MpnENode_2_2_; text: .text%__1cFParseMjump_if_join6MpnENode_2_2_;
text: .text%__1cSinstanceKlassKlassMcreate_klass6FpnGThread__pnMklassOopDesc__; text: .text%__1cSinstanceKlassKlassMcreate_klass6FpnGThread__pnMklassOopDesc__;
text: .text%__1cSinstanceKlassKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceKlassKlass.o; text: .text%__1cSinstanceKlassKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: instanceKlassKlass.o;
text: .text%__1cLconvP2BNodeGExpand6MpnFState_rnJNode_List__pnIMachNode__;
text: .text%__1cETypeRInitialize_shared6FpnHCompile__v_; text: .text%__1cETypeRInitialize_shared6FpnHCompile__v_;
text: .text%__1cQinstanceRefKlassZupdate_nonstatic_oop_maps6FpnMklassOopDesc__v_; text: .text%__1cQinstanceRefKlassZupdate_nonstatic_oop_maps6FpnMklassOopDesc__v_;
text: .text%__1cVInterfaceSupport_init6F_v_; text: .text%__1cVInterfaceSupport_init6F_v_;
text: .text%__1cZInterpreterMacroAssemblerSsuper_call_VM_leaf6MpnMRegisterImpl_pC2_v_;
text: .text%__1cPGenerationSizerQinitialize_flags6M_v_: parallelScavengeHeap.o; text: .text%__1cPGenerationSizerQinitialize_flags6M_v_: parallelScavengeHeap.o;
text: .text%__1cZInterpreterMacroAssemblerPdispatch_normal6MnITosState__v_; text: .text%__1cZInterpreterMacroAssemblerPdispatch_normal6MnITosState__v_;
text: .text%__1cJTimeStampMmilliseconds6kM_x_; text: .text%__1cJTimeStampMmilliseconds6kM_x_;
@ -4103,11 +3987,9 @@ text: .text%__1cLmethodKlassOset_alloc_size6MI_v_: methodKlass.o;
text: .text%__1cQvtableStubs_init6F_v_; text: .text%__1cQvtableStubs_init6F_v_;
text: .text%__1cKi0RegPOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cKi0RegPOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cKg1RegPOperKin_RegMask6kMi_pknHRegMask__; text: .text%__1cKg1RegPOperKin_RegMask6kMi_pknHRegMask__;
text: .text%__1cFVTuneEexit6F_v_;
text: .text%__1cLmethodKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: methodKlass.o; text: .text%__1cLmethodKlassSallocate_permanent6kMrnLKlassHandle_ipnGThread__pv_: methodKlass.o;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: methodLiveness.o; text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: methodLiveness.o;
text: .text%__1cMMutableSpaceOobject_iterate6MpnNObjectClosure__v_; text: .text%__1cMMutableSpaceOobject_iterate6MpnNObjectClosure__v_;
text: .text%__1cKvtune_init6F_v_;
text: .text%__1cKmutex_init6F_v_; text: .text%__1cKmutex_init6F_v_;
text: .text%__1cQaccessFlags_init6F_v_; text: .text%__1cQaccessFlags_init6F_v_;
text: .text%__1cOMacroAssemblerMcall_VM_leaf6MpnMRegisterImpl_pC222_v_; text: .text%__1cOMacroAssemblerMcall_VM_leaf6MpnMRegisterImpl_pC222_v_;
@ -4440,7 +4322,6 @@ text: .text%__1cNTemplateTableLtableswitch6F_v_;
text: .text%__1cNTemplateTableMlookupswitch6F_v_; text: .text%__1cNTemplateTableMlookupswitch6F_v_;
text: .text%__1cNTemplateTableRfast_linearswitch6F_v_; text: .text%__1cNTemplateTableRfast_linearswitch6F_v_;
text: .text%__1cNTemplateTableRfast_binaryswitch6F_v_; text: .text%__1cNTemplateTableRfast_binaryswitch6F_v_;
text: .text%__1cNCompileBrokerVinit_compiler_threads6Fi_v_;
text: .text%__1cJPSPermGen2t6MnNReservedSpace_IIIIpkci_v_; text: .text%__1cJPSPermGen2t6MnNReservedSpace_IIIIpkci_v_;
text: .text%__1cNCompileBrokerQset_should_block6F_v_; text: .text%__1cNCompileBrokerQset_should_block6F_v_;
text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: compileBroker.o; text: .text%__1cU__STATIC_CONSTRUCTOR6F_v_: compileBroker.o;

File diff suppressed because it is too large


@ -51,9 +51,9 @@ ifeq ($(JRE_RELEASE_VER),1.6.0)
VALIDATED_COMPILER_REVS := 5.8 VALIDATED_COMPILER_REVS := 5.8
VALIDATED_C_COMPILER_REVS := 5.8 VALIDATED_C_COMPILER_REVS := 5.8
else else
# Validated compilers for JDK7 are SS12 (5.9) or SS12 update 1 (5.10) # Validated compiler for JDK7 is SS12 update 1 + patches (5.10)
VALIDATED_COMPILER_REVS := 5.9 5.10 VALIDATED_COMPILER_REVS := 5.10
VALIDATED_C_COMPILER_REVS := 5.9 5.10 VALIDATED_C_COMPILER_REVS := 5.10
endif endif
# Warning messages about not using the above validated versions # Warning messages about not using the above validated versions


@ -3094,11 +3094,10 @@ void MacroAssembler::check_klass_subtype_slow_path(Register sub_klass,
void MacroAssembler::check_method_handle_type(Register mtype_reg, Register mh_reg, void MacroAssembler::check_method_handle_type(Register mtype_reg, Register mh_reg,
Register temp_reg, Register temp_reg,
Label& wrong_method_type) { Label& wrong_method_type) {
if (UseCompressedOops) unimplemented("coop"); // field accesses must decode
assert_different_registers(mtype_reg, mh_reg, temp_reg); assert_different_registers(mtype_reg, mh_reg, temp_reg);
// compare method type against that of the receiver // compare method type against that of the receiver
RegisterOrConstant mhtype_offset = delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg); RegisterOrConstant mhtype_offset = delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg);
ld_ptr(mh_reg, mhtype_offset, temp_reg); load_heap_oop(mh_reg, mhtype_offset, temp_reg);
cmp(temp_reg, mtype_reg); cmp(temp_reg, mtype_reg);
br(Assembler::notEqual, false, Assembler::pn, wrong_method_type); br(Assembler::notEqual, false, Assembler::pn, wrong_method_type);
delayed()->nop(); delayed()->nop();
@ -3112,16 +3111,15 @@ void MacroAssembler::check_method_handle_type(Register mtype_reg, Register mh_re
void MacroAssembler::load_method_handle_vmslots(Register vmslots_reg, Register mh_reg, void MacroAssembler::load_method_handle_vmslots(Register vmslots_reg, Register mh_reg,
Register temp_reg) { Register temp_reg) {
assert_different_registers(vmslots_reg, mh_reg, temp_reg); assert_different_registers(vmslots_reg, mh_reg, temp_reg);
if (UseCompressedOops) unimplemented("coop"); // field accesses must decode
// load mh.type.form.vmslots // load mh.type.form.vmslots
if (java_dyn_MethodHandle::vmslots_offset_in_bytes() != 0) { if (java_dyn_MethodHandle::vmslots_offset_in_bytes() != 0) {
// hoist vmslots into every mh to avoid dependent load chain // hoist vmslots into every mh to avoid dependent load chain
ld( Address(mh_reg, delayed_value(java_dyn_MethodHandle::vmslots_offset_in_bytes, temp_reg)), vmslots_reg); ld( Address(mh_reg, delayed_value(java_dyn_MethodHandle::vmslots_offset_in_bytes, temp_reg)), vmslots_reg);
} else { } else {
Register temp2_reg = vmslots_reg; Register temp2_reg = vmslots_reg;
ld_ptr(Address(mh_reg, delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg)), temp2_reg); load_heap_oop(Address(mh_reg, delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg)), temp2_reg);
ld_ptr(Address(temp2_reg, delayed_value(java_dyn_MethodType::form_offset_in_bytes, temp_reg)), temp2_reg); load_heap_oop(Address(temp2_reg, delayed_value(java_dyn_MethodType::form_offset_in_bytes, temp_reg)), temp2_reg);
ld( Address(temp2_reg, delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, temp_reg)), vmslots_reg); ld( Address(temp2_reg, delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, temp_reg)), vmslots_reg);
} }
} }
@ -3130,9 +3128,8 @@ void MacroAssembler::jump_to_method_handle_entry(Register mh_reg, Register temp_
assert(mh_reg == G3_method_handle, "caller must put MH object in G3"); assert(mh_reg == G3_method_handle, "caller must put MH object in G3");
assert_different_registers(mh_reg, temp_reg); assert_different_registers(mh_reg, temp_reg);
if (UseCompressedOops) unimplemented("coop"); // field accesses must decode
// pick out the interpreted side of the handler // pick out the interpreted side of the handler
// NOTE: vmentry is not an oop!
ld_ptr(mh_reg, delayed_value(java_dyn_MethodHandle::vmentry_offset_in_bytes, temp_reg), temp_reg); ld_ptr(mh_reg, delayed_value(java_dyn_MethodHandle::vmentry_offset_in_bytes, temp_reg), temp_reg);
// off we go... // off we go...
@ -4653,6 +4650,11 @@ void MacroAssembler::load_heap_oop(Register s1, int simm13a, Register d) {
} }
} }
void MacroAssembler::load_heap_oop(Register s1, RegisterOrConstant s2, Register d) {
if (s2.is_constant()) load_heap_oop(s1, s2.as_constant(), d);
else load_heap_oop(s1, s2.as_register(), d);
}
void MacroAssembler::store_heap_oop(Register d, Register s1, Register s2) { void MacroAssembler::store_heap_oop(Register d, Register s1, Register s2) {
if (UseCompressedOops) { if (UseCompressedOops) {
assert(s1 != d && s2 != d, "not enough registers"); assert(s1 != d && s2 != d, "not enough registers");


@ -825,6 +825,12 @@ class Assembler : public AbstractAssembler {
// test if -4096 <= x <= 4095 // test if -4096 <= x <= 4095
static bool is_simm13(int x) { return is_simm(x, 13); } static bool is_simm13(int x) { return is_simm(x, 13); }
// test if label is in simm16 range in words (wdisp16).
bool is_in_wdisp16_range(Label& L) {
intptr_t d = intptr_t(pc()) - intptr_t(target(L));
return is_simm(d, 18);
}
enum ASIs { // page 72, v9 enum ASIs { // page 72, v9
ASI_PRIMARY = 0x80, ASI_PRIMARY = 0x80,
ASI_PRIMARY_LITTLE = 0x88 ASI_PRIMARY_LITTLE = 0x88
@ -2103,6 +2109,7 @@ public:
void load_heap_oop(const Address& s, Register d); void load_heap_oop(const Address& s, Register d);
void load_heap_oop(Register s1, Register s2, Register d); void load_heap_oop(Register s1, Register s2, Register d);
void load_heap_oop(Register s1, int simm13a, Register d); void load_heap_oop(Register s1, int simm13a, Register d);
void load_heap_oop(Register s1, RegisterOrConstant s2, Register d);
void store_heap_oop(Register d, Register s1, Register s2); void store_heap_oop(Register d, Register s1, Register s2);
void store_heap_oop(Register d, Register s1, int simm13a); void store_heap_oop(Register d, Register s1, int simm13a);
void store_heap_oop(Register d, const Address& a, int offset = 0); void store_heap_oop(Register d, const Address& a, int offset = 0);
@ -2225,7 +2232,7 @@ public:
void stop(const char* msg); // prints msg, dumps registers and stops execution void stop(const char* msg); // prints msg, dumps registers and stops execution
void warn(const char* msg); // prints msg, but don't stop void warn(const char* msg); // prints msg, but don't stop
void untested(const char* what = ""); void untested(const char* what = "");
void unimplemented(const char* what = "") { char* b = new char[1024]; sprintf(b, "unimplemented: %s", what); stop(b); } void unimplemented(const char* what = "") { char* b = new char[1024]; jio_snprintf(b, 1024, "unimplemented: %s", what); stop(b); }
void should_not_reach_here() { stop("should not reach here"); } void should_not_reach_here() { stop("should not reach here"); }
void print_CPU_state(); void print_CPU_state();


@ -32,6 +32,7 @@ RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
: _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception) : _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception)
, _index(index) , _index(index)
{ {
assert(info != NULL, "must have info");
_info = new CodeEmitInfo(info); _info = new CodeEmitInfo(info);
} }
@ -424,8 +425,13 @@ void G1PreBarrierStub::emit_code(LIR_Assembler* ce) {
Register pre_val_reg = pre_val()->as_register(); Register pre_val_reg = pre_val()->as_register();
ce->mem2reg(addr(), pre_val(), T_OBJECT, patch_code(), info(), false); ce->mem2reg(addr(), pre_val(), T_OBJECT, patch_code(), info(), false);
__ br_on_reg_cond(Assembler::rc_z, /*annul*/false, Assembler::pt, if (__ is_in_wdisp16_range(_continuation)) {
pre_val_reg, _continuation); __ br_on_reg_cond(Assembler::rc_z, /*annul*/false, Assembler::pt,
pre_val_reg, _continuation);
} else {
__ cmp(pre_val_reg, G0);
__ brx(Assembler::equal, false, Assembler::pn, _continuation);
}
__ delayed()->nop(); __ delayed()->nop();
__ call(Runtime1::entry_for(Runtime1::Runtime1::g1_pre_barrier_slow_id)); __ call(Runtime1::entry_for(Runtime1::Runtime1::g1_pre_barrier_slow_id));
@ -451,8 +457,13 @@ void G1PostBarrierStub::emit_code(LIR_Assembler* ce) {
assert(new_val()->is_register(), "Precondition."); assert(new_val()->is_register(), "Precondition.");
Register addr_reg = addr()->as_pointer_register(); Register addr_reg = addr()->as_pointer_register();
Register new_val_reg = new_val()->as_register(); Register new_val_reg = new_val()->as_register();
__ br_on_reg_cond(Assembler::rc_z, /*annul*/false, Assembler::pt, if (__ is_in_wdisp16_range(_continuation)) {
new_val_reg, _continuation); __ br_on_reg_cond(Assembler::rc_z, /*annul*/false, Assembler::pt,
new_val_reg, _continuation);
} else {
__ cmp(new_val_reg, G0);
__ brx(Assembler::equal, false, Assembler::pn, _continuation);
}
__ delayed()->nop(); __ delayed()->nop();
__ call(Runtime1::entry_for(Runtime1::Runtime1::g1_post_barrier_slow_id)); __ call(Runtime1::entry_for(Runtime1::Runtime1::g1_post_barrier_slow_id));


@ -420,7 +420,8 @@ int LIR_Assembler::emit_unwind_handler() {
} }
if (compilation()->env()->dtrace_method_probes()) { if (compilation()->env()->dtrace_method_probes()) {
jobject2reg(method()->constant_encoding(), O0); __ mov(G2_thread, O0);
jobject2reg(method()->constant_encoding(), O1);
__ call(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), relocInfo::runtime_call_type); __ call(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), relocInfo::runtime_call_type);
__ delayed()->nop(); __ delayed()->nop();
} }


@ -311,7 +311,7 @@ void LIRGenerator::store_stack_parameter (LIR_Opr item, ByteSize offset_from_sp)
void LIRGenerator::do_StoreIndexed(StoreIndexed* x) { void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
assert(x->is_root(),""); assert(x->is_pinned(),"");
bool needs_range_check = true; bool needs_range_check = true;
bool use_length = x->length() != NULL; bool use_length = x->length() != NULL;
bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT; bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
@ -386,7 +386,7 @@ void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
void LIRGenerator::do_MonitorEnter(MonitorEnter* x) { void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
assert(x->is_root(),""); assert(x->is_pinned(),"");
LIRItem obj(x->obj(), this); LIRItem obj(x->obj(), this);
obj.load_item(); obj.load_item();
@ -398,7 +398,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
CodeEmitInfo* info_for_exception = NULL; CodeEmitInfo* info_for_exception = NULL;
if (x->needs_null_check()) { if (x->needs_null_check()) {
info_for_exception = state_for(x, x->lock_stack_before()); info_for_exception = state_for(x);
} }
// this CodeEmitInfo must not have the xhandlers because here the // this CodeEmitInfo must not have the xhandlers because here the
@ -409,7 +409,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
void LIRGenerator::do_MonitorExit(MonitorExit* x) { void LIRGenerator::do_MonitorExit(MonitorExit* x) {
assert(x->is_root(),""); assert(x->is_pinned(),"");
LIRItem obj(x->obj(), this); LIRItem obj(x->obj(), this);
obj.dont_load_item(); obj.dont_load_item();
@ -664,7 +664,7 @@ void LIRGenerator::do_CompareAndSwap(Intrinsic* x, ValueType* type) {
// Use temps to avoid kills // Use temps to avoid kills
LIR_Opr t1 = FrameMap::G1_opr; LIR_Opr t1 = FrameMap::G1_opr;
LIR_Opr t2 = FrameMap::G3_opr; LIR_Opr t2 = FrameMap::G3_opr;
LIR_Opr addr = new_pointer_register(); LIR_Opr addr = (type == objectType) ? new_register(T_OBJECT) : new_pointer_register();
// get address of field // get address of field
obj.load_item(); obj.load_item();
@ -871,10 +871,11 @@ void LIRGenerator::do_NewInstance(NewInstance* x) {
// This instruction can be deoptimized in the slow path : use // This instruction can be deoptimized in the slow path : use
// O0 as result register. // O0 as result register.
const LIR_Opr reg = result_register_for(x->type()); const LIR_Opr reg = result_register_for(x->type());
#ifndef PRODUCT
if (PrintNotLoaded && !x->klass()->is_loaded()) { if (PrintNotLoaded && !x->klass()->is_loaded()) {
tty->print_cr(" ###class not loaded at new bci %d", x->bci()); tty->print_cr(" ###class not loaded at new bci %d", x->printable_bci());
} }
#endif
CodeEmitInfo* info = state_for(x, x->state()); CodeEmitInfo* info = state_for(x, x->state());
LIR_Opr tmp1 = FrameMap::G1_oop_opr; LIR_Opr tmp1 = FrameMap::G1_oop_opr;
LIR_Opr tmp2 = FrameMap::G3_oop_opr; LIR_Opr tmp2 = FrameMap::G3_oop_opr;
@ -1018,7 +1019,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
obj.load_item(); obj.load_item();
LIR_Opr out_reg = rlock_result(x); LIR_Opr out_reg = rlock_result(x);
CodeStub* stub; CodeStub* stub;
CodeEmitInfo* info_for_exception = state_for(x, x->state()->copy_locks()); CodeEmitInfo* info_for_exception = state_for(x);
if (x->is_incompatible_class_change_check()) { if (x->is_incompatible_class_change_check()) {
assert(patching_info == NULL, "can't patch this"); assert(patching_info == NULL, "can't patch this");


@ -64,7 +64,7 @@ inline bool LinearScanWalker::pd_init_regs_for_alloc(Interval* cur) {
_first_reg = pd_first_callee_saved_reg; _first_reg = pd_first_callee_saved_reg;
_last_reg = pd_last_callee_saved_reg; _last_reg = pd_last_callee_saved_reg;
return true; return true;
} else if (cur->type() == T_INT || cur->type() == T_LONG || cur->type() == T_OBJECT) { } else if (cur->type() == T_INT || cur->type() == T_LONG || cur->type() == T_OBJECT || cur->type() == T_ADDRESS) {
_first_reg = pd_first_cpu_reg; _first_reg = pd_first_cpu_reg;
_last_reg = pd_last_allocatable_cpu_reg; _last_reg = pd_last_allocatable_cpu_reg;
return true; return true;


@ -27,6 +27,14 @@
#define __ _masm-> #define __ _masm->
#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#else
#define BLOCK_COMMENT(str) __ block_comment(str)
#endif
#define BIND(label) bind(label); BLOCK_COMMENT(#label ":")
address MethodHandleEntry::start_compiled_entry(MacroAssembler* _masm, address MethodHandleEntry::start_compiled_entry(MacroAssembler* _masm,
address interpreted_entry) { address interpreted_entry) {
// Just before the actual machine code entry point, allocate space // Just before the actual machine code entry point, allocate space
@ -90,8 +98,8 @@ address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler*
} }
// given the MethodType, find out where the MH argument is buried // given the MethodType, find out where the MH argument is buried
__ ld_ptr(Address(G5_method_type, __ delayed_value(java_dyn_MethodType::form_offset_in_bytes, O1_scratch)), O0_argslot); __ load_heap_oop(Address(G5_method_type, __ delayed_value(java_dyn_MethodType::form_offset_in_bytes, O1_scratch)), O0_argslot);
__ ldsw( Address(O0_argslot, __ delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, O1_scratch)), O0_argslot); __ ldsw( Address(O0_argslot, __ delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, O1_scratch)), O0_argslot);
__ ld_ptr(__ argument_address(O0_argslot), G3_method_handle); __ ld_ptr(__ argument_address(O0_argslot), G3_method_handle);
__ check_method_handle_type(G5_method_type, G3_method_handle, O1_scratch, wrong_method_type); __ check_method_handle_type(G5_method_type, G3_method_handle, O1_scratch, wrong_method_type);
@ -105,6 +113,7 @@ address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler*
static void verify_argslot(MacroAssembler* _masm, Register argslot_reg, Register temp_reg, const char* error_message) { static void verify_argslot(MacroAssembler* _masm, Register argslot_reg, Register temp_reg, const char* error_message) {
// Verify that argslot lies within (Gargs, FP]. // Verify that argslot lies within (Gargs, FP].
Label L_ok, L_bad; Label L_ok, L_bad;
BLOCK_COMMENT("{ verify_argslot");
#ifdef _LP64 #ifdef _LP64
__ add(FP, STACK_BIAS, temp_reg); __ add(FP, STACK_BIAS, temp_reg);
__ cmp(argslot_reg, temp_reg); __ cmp(argslot_reg, temp_reg);
@ -119,6 +128,7 @@ static void verify_argslot(MacroAssembler* _masm, Register argslot_reg, Register
__ bind(L_bad); __ bind(L_bad);
__ stop(error_message); __ stop(error_message);
__ bind(L_ok); __ bind(L_ok);
BLOCK_COMMENT("} verify_argslot");
} }
#endif #endif
@ -175,6 +185,7 @@ void MethodHandles::insert_arg_slots(MacroAssembler* _masm,
// for (temp = sp + size; temp < argslot; temp++) // for (temp = sp + size; temp < argslot; temp++)
// temp[-size] = temp[0] // temp[-size] = temp[0]
// argslot -= size; // argslot -= size;
BLOCK_COMMENT("insert_arg_slots {");
RegisterOrConstant offset = __ regcon_sll_ptr(arg_slots, LogBytesPerWord, temp3_reg); RegisterOrConstant offset = __ regcon_sll_ptr(arg_slots, LogBytesPerWord, temp3_reg);
// Keep the stack pointer 2*wordSize aligned. // Keep the stack pointer 2*wordSize aligned.
@ -187,7 +198,7 @@ void MethodHandles::insert_arg_slots(MacroAssembler* _masm,
{ {
Label loop; Label loop;
__ bind(loop); __ BIND(loop);
// pull one word down each time through the loop // pull one word down each time through the loop
__ ld_ptr(Address(temp_reg, 0), temp2_reg); __ ld_ptr(Address(temp_reg, 0), temp2_reg);
__ st_ptr(temp2_reg, Address(temp_reg, offset)); __ st_ptr(temp2_reg, Address(temp_reg, offset));
@ -199,6 +210,7 @@ void MethodHandles::insert_arg_slots(MacroAssembler* _masm,
// Now move the argslot down, to point to the opened-up space. // Now move the argslot down, to point to the opened-up space.
__ add(argslot_reg, offset, argslot_reg); __ add(argslot_reg, offset, argslot_reg);
BLOCK_COMMENT("} insert_arg_slots");
} }
@ -235,6 +247,7 @@ void MethodHandles::remove_arg_slots(MacroAssembler* _masm,
} }
#endif // ASSERT #endif // ASSERT
BLOCK_COMMENT("remove_arg_slots {");
// Pull up everything shallower than argslot. // Pull up everything shallower than argslot.
// Then remove the excess space on the stack. // Then remove the excess space on the stack.
// The stacked return address gets pulled up with everything else. // The stacked return address gets pulled up with everything else.
@ -246,7 +259,7 @@ void MethodHandles::remove_arg_slots(MacroAssembler* _masm,
__ sub(argslot_reg, wordSize, temp_reg); // source pointer for copy __ sub(argslot_reg, wordSize, temp_reg); // source pointer for copy
{ {
Label loop; Label loop;
__ bind(loop); __ BIND(loop);
// pull one word up each time through the loop // pull one word up each time through the loop
__ ld_ptr(Address(temp_reg, 0), temp2_reg); __ ld_ptr(Address(temp_reg, 0), temp2_reg);
__ st_ptr(temp2_reg, Address(temp_reg, offset)); __ st_ptr(temp2_reg, Address(temp_reg, offset));
@ -265,29 +278,35 @@ void MethodHandles::remove_arg_slots(MacroAssembler* _masm,
const int TwoWordAlignmentMask = right_n_bits(LogBytesPerWord + 1); const int TwoWordAlignmentMask = right_n_bits(LogBytesPerWord + 1);
RegisterOrConstant masked_offset = __ regcon_andn_ptr(offset, TwoWordAlignmentMask, temp_reg); RegisterOrConstant masked_offset = __ regcon_andn_ptr(offset, TwoWordAlignmentMask, temp_reg);
__ add(SP, masked_offset, SP); __ add(SP, masked_offset, SP);
BLOCK_COMMENT("} remove_arg_slots");
} }
#ifndef PRODUCT #ifndef PRODUCT
extern "C" void print_method_handle(oop mh); extern "C" void print_method_handle(oop mh);
void trace_method_handle_stub(const char* adaptername, void trace_method_handle_stub(const char* adaptername,
oop mh) { oopDesc* mh) {
#if 0
intptr_t* entry_sp,
intptr_t* saved_sp,
intptr_t* saved_bp) {
// called as a leaf from native code: do not block the JVM!
intptr_t* last_sp = (intptr_t*) saved_bp[frame::interpreter_frame_last_sp_offset];
intptr_t* base_sp = (intptr_t*) saved_bp[frame::interpreter_frame_monitor_block_top_offset];
printf("MH %s mh="INTPTR_FORMAT" sp=("INTPTR_FORMAT"+"INTX_FORMAT") stack_size="INTX_FORMAT" bp="INTPTR_FORMAT"\n",
adaptername, (intptr_t)mh, (intptr_t)entry_sp, (intptr_t)(saved_sp - entry_sp), (intptr_t)(base_sp - last_sp), (intptr_t)saved_bp);
if (last_sp != saved_sp)
printf("*** last_sp="INTPTR_FORMAT"\n", (intptr_t)last_sp);
#endif
printf("MH %s mh="INTPTR_FORMAT"\n", adaptername, (intptr_t) mh); printf("MH %s mh="INTPTR_FORMAT"\n", adaptername, (intptr_t) mh);
print_method_handle(mh); print_method_handle(mh);
} }
void MethodHandles::trace_method_handle(MacroAssembler* _masm, const char* adaptername) {
if (!TraceMethodHandles) return;
BLOCK_COMMENT("trace_method_handle {");
// save: Gargs, O5_savedSP
__ save_frame(16);
__ set((intptr_t) adaptername, O0);
__ mov(G3_method_handle, O1);
__ mov(G3_method_handle, L3);
__ mov(Gargs, L4);
__ mov(G5_method_type, L5);
__ call_VM_leaf(L7, CAST_FROM_FN_PTR(address, trace_method_handle_stub));
__ mov(L3, G3_method_handle);
__ mov(L4, Gargs);
__ mov(L5, G5_method_type);
__ restore();
BLOCK_COMMENT("} trace_method_handle");
}
#endif // PRODUCT #endif // PRODUCT
// which conversion op types are implemented here? // which conversion op types are implemented here?
@ -348,18 +367,8 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
} }
address interp_entry = __ pc(); address interp_entry = __ pc();
if (UseCompressedOops) __ unimplemented("UseCompressedOops");
#ifndef PRODUCT trace_method_handle(_masm, entry_name(ek));
if (TraceMethodHandles) {
// save: Gargs, O5_savedSP
__ save(SP, -16*wordSize, SP);
__ set((intptr_t) entry_name(ek), O0);
__ mov(G3_method_handle, O1);
__ call_VM_leaf(Lscratch, CAST_FROM_FN_PTR(address, trace_method_handle_stub));
__ restore(SP, 16*wordSize, SP);
}
#endif // PRODUCT
switch ((int) ek) { switch ((int) ek) {
case _raise_exception: case _raise_exception:
@ -413,7 +422,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
case _invokestatic_mh: case _invokestatic_mh:
case _invokespecial_mh: case _invokespecial_mh:
{ {
__ ld_ptr(G3_mh_vmtarget, G5_method); // target is a methodOop __ load_heap_oop(G3_mh_vmtarget, G5_method); // target is a methodOop
__ verify_oop(G5_method); __ verify_oop(G5_method);
// Same as TemplateTable::invokestatic or invokespecial, // Same as TemplateTable::invokestatic or invokespecial,
// minus the CP setup and profiling: // minus the CP setup and profiling:
@ -468,7 +477,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
// minus the CP setup and profiling: // minus the CP setup and profiling:
__ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch); __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
Register O1_intf = O1_scratch; Register O1_intf = O1_scratch;
__ ld_ptr(G3_mh_vmtarget, O1_intf); __ load_heap_oop(G3_mh_vmtarget, O1_intf);
__ ldsw(G3_dmh_vmindex, G5_index); __ ldsw(G3_dmh_vmindex, G5_index);
__ ld_ptr(__ argument_address(O0_argslot, -1), G3_method_handle); __ ld_ptr(__ argument_address(O0_argslot, -1), G3_method_handle);
__ null_check(G3_method_handle, oopDesc::klass_offset_in_bytes()); __ null_check(G3_method_handle, oopDesc::klass_offset_in_bytes());
@ -523,7 +532,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
insert_arg_slots(_masm, arg_slots * stack_move_unit(), arg_mask, O0_argslot, O1_scratch, O2_scratch, G5_index); insert_arg_slots(_masm, arg_slots * stack_move_unit(), arg_mask, O0_argslot, O1_scratch, O2_scratch, G5_index);
// Store bound argument into the new stack slot: // Store bound argument into the new stack slot:
__ ld_ptr(G3_bmh_argument, O1_scratch); __ load_heap_oop(G3_bmh_argument, O1_scratch);
if (arg_type == T_OBJECT) { if (arg_type == T_OBJECT) {
__ st_ptr(O1_scratch, Address(O0_argslot, 0)); __ st_ptr(O1_scratch, Address(O0_argslot, 0));
} else { } else {
@ -541,12 +550,12 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
} }
if (direct_to_method) { if (direct_to_method) {
__ ld_ptr(G3_mh_vmtarget, G5_method); // target is a methodOop __ load_heap_oop(G3_mh_vmtarget, G5_method); // target is a methodOop
__ verify_oop(G5_method); __ verify_oop(G5_method);
__ jump_indirect_to(G5_method_fie, O1_scratch); __ jump_indirect_to(G5_method_fie, O1_scratch);
__ delayed()->nop(); __ delayed()->nop();
} else { } else {
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); // target is a methodOop __ load_heap_oop(G3_mh_vmtarget, G3_method_handle); // target is a methodOop
__ verify_oop(G3_method_handle); __ verify_oop(G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
} }
@ -556,7 +565,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
case _adapter_retype_only: case _adapter_retype_only:
case _adapter_retype_raw: case _adapter_retype_raw:
// Immediately jump to the next MH layer: // Immediately jump to the next MH layer:
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
// This is OK when all parameter types widen. // This is OK when all parameter types widen.
// It is also OK when a return type narrows. // It is also OK when a return type narrows.
@ -572,8 +581,8 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
Address vmarg = __ argument_address(O0_argslot); Address vmarg = __ argument_address(O0_argslot);
// What class are we casting to? // What class are we casting to?
__ ld_ptr(G3_amh_argument, G5_klass); // This is a Class object! __ load_heap_oop(G3_amh_argument, G5_klass); // This is a Class object!
__ ld_ptr(Address(G5_klass, java_lang_Class::klass_offset_in_bytes()), G5_klass); __ load_heap_oop(Address(G5_klass, java_lang_Class::klass_offset_in_bytes()), G5_klass);
Label done; Label done;
__ ld_ptr(vmarg, O1_scratch); __ ld_ptr(vmarg, O1_scratch);
@ -590,14 +599,14 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
// If we get here, the type check failed! // If we get here, the type check failed!
__ ldsw(G3_amh_vmargslot, O0_argslot); // reload argslot field __ ldsw(G3_amh_vmargslot, O0_argslot); // reload argslot field
__ ld_ptr(G3_amh_argument, O3_scratch); // required class __ load_heap_oop(G3_amh_argument, O3_scratch); // required class
__ ld_ptr(vmarg, O2_scratch); // bad object __ ld_ptr(vmarg, O2_scratch); // bad object
__ jump_to(AddressLiteral(from_interpreted_entry(_raise_exception)), O0_argslot); __ jump_to(AddressLiteral(from_interpreted_entry(_raise_exception)), O0_argslot);
__ delayed()->mov(Bytecodes::_checkcast, O1_scratch); // who is complaining? __ delayed()->mov(Bytecodes::_checkcast, O1_scratch); // who is complaining?
__ bind(done); __ bind(done);
// Get the new MH: // Get the new MH:
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
} }
break; break;
@ -676,7 +685,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
__ st(O1_scratch, vmarg); __ st(O1_scratch, vmarg);
// Get the new MH: // Get the new MH:
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
} }
break; break;
@ -721,7 +730,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
ShouldNotReachHere(); ShouldNotReachHere();
} }
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
} }
break; break;
@ -851,7 +860,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
} }
} }
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
} }
break; break;
@ -895,7 +904,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
__ brx(Assembler::less, false, Assembler::pt, loop); __ brx(Assembler::less, false, Assembler::pt, loop);
__ delayed()->nop(); // FILLME __ delayed()->nop(); // FILLME
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
} }
break; break;
@ -913,7 +922,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
remove_arg_slots(_masm, G5_stack_move, O0_argslot, O1_scratch, O2_scratch, O3_scratch); remove_arg_slots(_masm, G5_stack_move, O0_argslot, O1_scratch, O2_scratch, O3_scratch);
__ ld_ptr(G3_mh_vmtarget, G3_method_handle); __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
__ jump_to_method_handle_entry(G3_method_handle, O1_scratch); __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
} }
break; break;


@ -2586,6 +2586,8 @@ class StubGenerator: public StubCodeGenerator {
__ restore(); __ restore();
#endif #endif
assert_clean_int(O2_count, G1); // Make sure 'count' is clean int.
#ifdef ASSERT #ifdef ASSERT
// caller guarantees that the arrays really are different // caller guarantees that the arrays really are different
// otherwise, we would have to make conjoint checks // otherwise, we would have to make conjoint checks
@ -2600,8 +2602,6 @@ class StubGenerator: public StubCodeGenerator {
} }
#endif //ASSERT #endif //ASSERT
assert_clean_int(O2_count, G1); // Make sure 'count' is clean int.
checkcast_copy_entry = __ pc(); checkcast_copy_entry = __ pc();
// caller can pass a 64-bit byte count here (from generic stub) // caller can pass a 64-bit byte count here (from generic stub)
BLOCK_COMMENT("Entry:"); BLOCK_COMMENT("Entry:");


@ -43,7 +43,7 @@ enum /* platform_dependent_constants */ {
// MethodHandles adapters // MethodHandles adapters
enum method_handles_platform_dependent_constants { enum method_handles_platform_dependent_constants {
method_handles_adapters_code_size = 12000 method_handles_adapters_code_size = 15000
}; };
class Sparc { class Sparc {


@ -3273,7 +3273,7 @@ void TemplateTable::invokedynamic(int byte_no) {
__ sll(Rret, LogBytesPerWord, Rret); __ sll(Rret, LogBytesPerWord, Rret);
__ ld_ptr(Rtemp, Rret, Rret); // get return address __ ld_ptr(Rtemp, Rret, Rret); // get return address
__ ld_ptr(G5_callsite, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, Rscratch), G3_method_handle); __ load_heap_oop(G5_callsite, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, Rscratch), G3_method_handle);
__ null_check(G3_method_handle); __ null_check(G3_method_handle);
// Adjust Rret first so Llast_SP can be same as Rret // Adjust Rret first so Llast_SP can be same as Rret


@ -7709,9 +7709,14 @@ RegisterOrConstant MacroAssembler::delayed_value_impl(intptr_t* delayed_value_ad
void MacroAssembler::check_method_handle_type(Register mtype_reg, Register mh_reg, void MacroAssembler::check_method_handle_type(Register mtype_reg, Register mh_reg,
Register temp_reg, Register temp_reg,
Label& wrong_method_type) { Label& wrong_method_type) {
if (UseCompressedOops) unimplemented(); // field accesses must decode Address type_addr(mh_reg, delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg));
// compare method type against that of the receiver // compare method type against that of the receiver
cmpptr(mtype_reg, Address(mh_reg, delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg))); if (UseCompressedOops) {
load_heap_oop(temp_reg, type_addr);
cmpptr(mtype_reg, temp_reg);
} else {
cmpptr(mtype_reg, type_addr);
}
jcc(Assembler::notEqual, wrong_method_type); jcc(Assembler::notEqual, wrong_method_type);
} }
@ -7723,15 +7728,14 @@ void MacroAssembler::check_method_handle_type(Register mtype_reg, Register mh_re
void MacroAssembler::load_method_handle_vmslots(Register vmslots_reg, Register mh_reg, void MacroAssembler::load_method_handle_vmslots(Register vmslots_reg, Register mh_reg,
Register temp_reg) { Register temp_reg) {
assert_different_registers(vmslots_reg, mh_reg, temp_reg); assert_different_registers(vmslots_reg, mh_reg, temp_reg);
if (UseCompressedOops) unimplemented(); // field accesses must decode
// load mh.type.form.vmslots // load mh.type.form.vmslots
if (java_dyn_MethodHandle::vmslots_offset_in_bytes() != 0) { if (java_dyn_MethodHandle::vmslots_offset_in_bytes() != 0) {
// hoist vmslots into every mh to avoid dependent load chain // hoist vmslots into every mh to avoid dependent load chain
movl(vmslots_reg, Address(mh_reg, delayed_value(java_dyn_MethodHandle::vmslots_offset_in_bytes, temp_reg))); movl(vmslots_reg, Address(mh_reg, delayed_value(java_dyn_MethodHandle::vmslots_offset_in_bytes, temp_reg)));
} else { } else {
Register temp2_reg = vmslots_reg; Register temp2_reg = vmslots_reg;
movptr(temp2_reg, Address(mh_reg, delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg))); load_heap_oop(temp2_reg, Address(mh_reg, delayed_value(java_dyn_MethodHandle::type_offset_in_bytes, temp_reg)));
movptr(temp2_reg, Address(temp2_reg, delayed_value(java_dyn_MethodType::form_offset_in_bytes, temp_reg))); load_heap_oop(temp2_reg, Address(temp2_reg, delayed_value(java_dyn_MethodType::form_offset_in_bytes, temp_reg)));
movl(vmslots_reg, Address(temp2_reg, delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, temp_reg))); movl(vmslots_reg, Address(temp2_reg, delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, temp_reg)));
} }
} }
@ -7745,9 +7749,8 @@ void MacroAssembler::jump_to_method_handle_entry(Register mh_reg, Register temp_
assert(mh_reg == rcx, "caller must put MH object in rcx"); assert(mh_reg == rcx, "caller must put MH object in rcx");
assert_different_registers(mh_reg, temp_reg); assert_different_registers(mh_reg, temp_reg);
if (UseCompressedOops) unimplemented(); // field accesses must decode
// pick out the interpreted side of the handler // pick out the interpreted side of the handler
// NOTE: vmentry is not an oop!
movptr(temp_reg, Address(mh_reg, delayed_value(java_dyn_MethodHandle::vmentry_offset_in_bytes, temp_reg))); movptr(temp_reg, Address(mh_reg, delayed_value(java_dyn_MethodHandle::vmentry_offset_in_bytes, temp_reg)));
// off we go... // off we go...
@ -8238,6 +8241,40 @@ void MacroAssembler::store_klass(Register dst, Register src) {
movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src); movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
} }
void MacroAssembler::load_heap_oop(Register dst, Address src) {
#ifdef _LP64
if (UseCompressedOops) {
movl(dst, src);
decode_heap_oop(dst);
} else
#endif
movptr(dst, src);
}
void MacroAssembler::store_heap_oop(Address dst, Register src) {
#ifdef _LP64
if (UseCompressedOops) {
assert(!dst.uses(src), "not enough registers");
encode_heap_oop(src);
movl(dst, src);
} else
#endif
movptr(dst, src);
}
// Used for storing NULLs.
void MacroAssembler::store_heap_oop_null(Address dst) {
#ifdef _LP64
if (UseCompressedOops) {
movl(dst, (int32_t)NULL_WORD);
} else {
movslq(dst, (int32_t)NULL_WORD);
}
#else
movl(dst, (int32_t)NULL_WORD);
#endif
}
#ifdef _LP64 #ifdef _LP64
void MacroAssembler::store_klass_gap(Register dst, Register src) { void MacroAssembler::store_klass_gap(Register dst, Register src) {
if (UseCompressedOops) { if (UseCompressedOops) {
@ -8246,34 +8283,6 @@ void MacroAssembler::store_klass_gap(Register dst, Register src) {
} }
} }
void MacroAssembler::load_heap_oop(Register dst, Address src) {
if (UseCompressedOops) {
movl(dst, src);
decode_heap_oop(dst);
} else {
movq(dst, src);
}
}
void MacroAssembler::store_heap_oop(Address dst, Register src) {
if (UseCompressedOops) {
assert(!dst.uses(src), "not enough registers");
encode_heap_oop(src);
movl(dst, src);
} else {
movq(dst, src);
}
}
// Used for storing NULLs.
void MacroAssembler::store_heap_oop_null(Address dst) {
if (UseCompressedOops) {
movl(dst, (int32_t)NULL_WORD);
} else {
movslq(dst, (int32_t)NULL_WORD);
}
}
#ifdef ASSERT #ifdef ASSERT
void MacroAssembler::verify_heapbase(const char* msg) { void MacroAssembler::verify_heapbase(const char* msg) {
assert (UseCompressedOops, "should be compressed"); assert (UseCompressedOops, "should be compressed");


@ -1682,24 +1682,24 @@ class MacroAssembler: public Assembler {
void load_klass(Register dst, Register src); void load_klass(Register dst, Register src);
void store_klass(Register dst, Register src); void store_klass(Register dst, Register src);
void load_heap_oop(Register dst, Address src);
void store_heap_oop(Address dst, Register src);
// Used for storing NULL. All other oop constants should be
// stored using routines that take a jobject.
void store_heap_oop_null(Address dst);
void load_prototype_header(Register dst, Register src); void load_prototype_header(Register dst, Register src);
#ifdef _LP64 #ifdef _LP64
void store_klass_gap(Register dst, Register src); void store_klass_gap(Register dst, Register src);
void load_heap_oop(Register dst, Address src);
void store_heap_oop(Address dst, Register src);
// This dummy is to prevent a call to store_heap_oop from // This dummy is to prevent a call to store_heap_oop from
// converting a zero (like NULL) into a Register by giving // converting a zero (like NULL) into a Register by giving
// the compiler two choices it can't resolve // the compiler two choices it can't resolve
void store_heap_oop(Address dst, void* dummy); void store_heap_oop(Address dst, void* dummy);
// Used for storing NULL. All other oop constants should be
// stored using routines that take a jobject.
void store_heap_oop_null(Address dst);
void encode_heap_oop(Register r); void encode_heap_oop(Register r);
void decode_heap_oop(Register r); void decode_heap_oop(Register r);
void encode_heap_oop_not_null(Register r); void encode_heap_oop_not_null(Register r);
@ -1927,7 +1927,7 @@ class MacroAssembler: public Assembler {
void untested() { stop("untested"); } void untested() { stop("untested"); }
void unimplemented(const char* what = "") { char* b = new char[1024]; jio_snprintf(b, sizeof(b), "unimplemented: %s", what); stop(b); } void unimplemented(const char* what = "") { char* b = new char[1024]; jio_snprintf(b, 1024, "unimplemented: %s", what); stop(b); }
void should_not_reach_here() { stop("should not reach here"); } void should_not_reach_here() { stop("should not reach here"); }


@ -83,7 +83,8 @@ RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
: _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception) : _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception)
, _index(index) , _index(index)
{ {
_info = info == NULL ? NULL : new CodeEmitInfo(info); assert(info != NULL, "must have info");
_info = new CodeEmitInfo(info);
} }


@@ -488,7 +488,9 @@ int LIR_Assembler::emit_unwind_handler() {
  }
  if (compilation()->env()->dtrace_method_probes()) {
-   __ movoop(Address(rsp, 0), method()->constant_encoding());
+   __ get_thread(rax);
+   __ movptr(Address(rsp, 0), rax);
+   __ movoop(Address(rsp, sizeof(void*)), method()->constant_encoding());
    __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit)));
  }
@@ -1939,8 +1941,6 @@ void LIR_Assembler::emit_compare_and_swap(LIR_OpCompareAndSwap* op) {
    __ cmpxchgptr(newval, Address(addr, 0));
  } else if (op->code() == lir_cas_int) {
    __ cmpxchgl(newval, Address(addr, 0));
- } else {
-   LP64_ONLY(__ cmpxchgq(newval, Address(addr, 0)));
  }
#ifdef _LP64
  } else if (op->code() == lir_cas_long) {


@@ -107,7 +107,7 @@ bool LIRGenerator::can_store_as_constant(Value v, BasicType type) const {
    return false;
  }
  Constant* c = v->as_Constant();
- if (c && c->state() == NULL) {
+ if (c && c->state_before() == NULL) {
    // constants of any type can be stored directly, except for
    // unloaded object constants.
    return true;
@@ -250,7 +250,7 @@ void LIRGenerator::store_stack_parameter (LIR_Opr item, ByteSize offset_from_sp)
void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
- assert(x->is_root(),"");
+ assert(x->is_pinned(),"");
  bool needs_range_check = true;
  bool use_length = x->length() != NULL;
  bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
@@ -325,7 +325,7 @@ void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
- assert(x->is_root(),"");
+ assert(x->is_pinned(),"");
  LIRItem obj(x->obj(), this);
  obj.load_item();
@@ -341,7 +341,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
  CodeEmitInfo* info_for_exception = NULL;
  if (x->needs_null_check()) {
-   info_for_exception = state_for(x, x->lock_stack_before());
+   info_for_exception = state_for(x);
  }
  // this CodeEmitInfo must not have the xhandlers because here the
  // object is already locked (xhandlers expect object to be unlocked)
@@ -352,7 +352,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
void LIRGenerator::do_MonitorExit(MonitorExit* x) {
- assert(x->is_root(),"");
+ assert(x->is_pinned(),"");
  LIRItem obj(x->obj(), this);
  obj.dont_load_item();
@@ -765,7 +765,7 @@ void LIRGenerator::do_CompareAndSwap(Intrinsic* x, ValueType* type) {
    ShouldNotReachHere();
  }
- LIR_Opr addr = new_pointer_register();
+ LIR_Opr addr = (type == objectType) ? new_register(T_OBJECT) : new_pointer_register();
  LIR_Address* a;
  if(offset.result()->is_constant()) {
    a = new LIR_Address(obj.result(),
@@ -984,9 +984,11 @@ void LIRGenerator::do_Convert(Convert* x) {
void LIRGenerator::do_NewInstance(NewInstance* x) {
+#ifndef PRODUCT
  if (PrintNotLoaded && !x->klass()->is_loaded()) {
-   tty->print_cr(" ###class not loaded at new bci %d", x->bci());
+   tty->print_cr(" ###class not loaded at new bci %d", x->printable_bci());
  }
+#endif
  CodeEmitInfo* info = state_for(x, x->state());
  LIR_Opr reg = result_register_for(x->type());
  LIR_Opr klass_reg = new_register(objectType);
@@ -1127,7 +1129,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
  obj.load_item();
  // info for exceptions
- CodeEmitInfo* info_for_exception = state_for(x, x->state()->copy_locks());
+ CodeEmitInfo* info_for_exception = state_for(x);
  CodeStub* stub;
  if (x->is_incompatible_class_change_check()) {


@@ -123,11 +123,9 @@ address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler*
  }
  // given the MethodType, find out where the MH argument is buried
- __ movptr(rdx_temp, Address(rax_mtype,
-                             __ delayed_value(java_dyn_MethodType::form_offset_in_bytes, rdi_temp)));
+ __ load_heap_oop(rdx_temp, Address(rax_mtype, __ delayed_value(java_dyn_MethodType::form_offset_in_bytes, rdi_temp)));
  Register rdx_vmslots = rdx_temp;
- __ movl(rdx_vmslots, Address(rdx_temp,
-                              __ delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, rdi_temp)));
+ __ movl(rdx_vmslots, Address(rdx_temp, __ delayed_value(java_dyn_MethodTypeForm::vmslots_offset_in_bytes, rdi_temp)));
  __ movptr(rcx_recv, __ argument_address(rdx_vmslots));
  trace_method_handle(_masm, "invokeExact");
@@ -154,20 +152,18 @@ address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler*
                   rcx_argslot, rbx_temp, rdx_temp);
  // load up an adapter from the calling type (Java weaves this)
- __ movptr(rdx_temp, Address(rax_mtype,
-                             __ delayed_value(java_dyn_MethodType::form_offset_in_bytes, rdi_temp)));
+ __ load_heap_oop(rdx_temp, Address(rax_mtype, __ delayed_value(java_dyn_MethodType::form_offset_in_bytes, rdi_temp)));
  Register rdx_adapter = rdx_temp;
- // movptr(rdx_adapter, Address(rdx_temp, java_dyn_MethodTypeForm::genericInvoker_offset_in_bytes()));
+ // __ load_heap_oop(rdx_adapter, Address(rdx_temp, java_dyn_MethodTypeForm::genericInvoker_offset_in_bytes()));
  // deal with old JDK versions:
- __ lea(rdi_temp, Address(rdx_temp,
-                          __ delayed_value(java_dyn_MethodTypeForm::genericInvoker_offset_in_bytes, rdi_temp)));
+ __ lea(rdi_temp, Address(rdx_temp, __ delayed_value(java_dyn_MethodTypeForm::genericInvoker_offset_in_bytes, rdi_temp)));
  __ cmpptr(rdi_temp, rdx_temp);
  Label sorry_no_invoke_generic;
- __ jccb(Assembler::below, sorry_no_invoke_generic);
+ __ jcc(Assembler::below, sorry_no_invoke_generic);
- __ movptr(rdx_adapter, Address(rdi_temp, 0));
+ __ load_heap_oop(rdx_adapter, Address(rdi_temp, 0));
  __ testptr(rdx_adapter, rdx_adapter);
- __ jccb(Assembler::zero, sorry_no_invoke_generic);
+ __ jcc(Assembler::zero, sorry_no_invoke_generic);
  __ movptr(Address(rcx_argslot, 1 * Interpreter::stackElementSize), rdx_adapter);
  // As a trusted first argument, pass the type being called, so the adapter knows
  // the actual types of the arguments and return values.
@@ -346,7 +342,7 @@ void trace_method_handle_stub(const char* adaptername,
  if (stack_dump_count > 64) stack_dump_count = 48;
  for (i = 0; i < stack_dump_count; i += 4) {
    printf(" dump at SP[%d] "INTPTR_FORMAT": "INTPTR_FORMAT" "INTPTR_FORMAT" "INTPTR_FORMAT" "INTPTR_FORMAT"\n",
-          i, &entry_sp[i+0], entry_sp[i+0], entry_sp[i+1], entry_sp[i+2], entry_sp[i+3]);
+          i, (intptr_t) &entry_sp[i+0], entry_sp[i+0], entry_sp[i+1], entry_sp[i+2], entry_sp[i+3]);
  }
  print_method_handle(mh);
}
@@ -431,7 +427,6 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
  }
  address interp_entry = __ pc();
- if (UseCompressedOops) __ unimplemented("UseCompressedOops");
  trace_method_handle(_masm, entry_name(ek));
@ -489,7 +484,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
case _invokespecial_mh: case _invokespecial_mh:
{ {
Register rbx_method = rbx_temp; Register rbx_method = rbx_temp;
__ movptr(rbx_method, rcx_mh_vmtarget); // target is a methodOop __ load_heap_oop(rbx_method, rcx_mh_vmtarget); // target is a methodOop
__ verify_oop(rbx_method); __ verify_oop(rbx_method);
// same as TemplateTable::invokestatic or invokespecial, // same as TemplateTable::invokestatic or invokespecial,
// minus the CP setup and profiling: // minus the CP setup and profiling:
@ -546,8 +541,8 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
__ load_method_handle_vmslots(rax_argslot, rcx_recv, rdx_temp); __ load_method_handle_vmslots(rax_argslot, rcx_recv, rdx_temp);
Register rdx_intf = rdx_temp; Register rdx_intf = rdx_temp;
Register rbx_index = rbx_temp; Register rbx_index = rbx_temp;
__ movptr(rdx_intf, rcx_mh_vmtarget); __ load_heap_oop(rdx_intf, rcx_mh_vmtarget);
__ movl(rbx_index, rcx_dmh_vmindex); __ movl(rbx_index, rcx_dmh_vmindex);
__ movptr(rcx_recv, __ argument_address(rax_argslot, -1)); __ movptr(rcx_recv, __ argument_address(rax_argslot, -1));
__ null_check(rcx_recv, oopDesc::klass_offset_in_bytes()); __ null_check(rcx_recv, oopDesc::klass_offset_in_bytes());
@ -602,7 +597,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
rax_argslot, rbx_temp, rdx_temp); rax_argslot, rbx_temp, rdx_temp);
// store bound argument into the new stack slot: // store bound argument into the new stack slot:
__ movptr(rbx_temp, rcx_bmh_argument); __ load_heap_oop(rbx_temp, rcx_bmh_argument);
Address prim_value_addr(rbx_temp, java_lang_boxing_object::value_offset_in_bytes(arg_type)); Address prim_value_addr(rbx_temp, java_lang_boxing_object::value_offset_in_bytes(arg_type));
if (arg_type == T_OBJECT) { if (arg_type == T_OBJECT) {
__ movptr(Address(rax_argslot, 0), rbx_temp); __ movptr(Address(rax_argslot, 0), rbx_temp);
@ -620,11 +615,11 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
if (direct_to_method) { if (direct_to_method) {
Register rbx_method = rbx_temp; Register rbx_method = rbx_temp;
__ movptr(rbx_method, rcx_mh_vmtarget); __ load_heap_oop(rbx_method, rcx_mh_vmtarget);
__ verify_oop(rbx_method); __ verify_oop(rbx_method);
__ jmp(rbx_method_fie); __ jmp(rbx_method_fie);
} else { } else {
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ verify_oop(rcx_recv); __ verify_oop(rcx_recv);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
} }
@ -634,7 +629,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
case _adapter_retype_only: case _adapter_retype_only:
case _adapter_retype_raw: case _adapter_retype_raw:
// immediately jump to the next MH layer: // immediately jump to the next MH layer:
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ verify_oop(rcx_recv); __ verify_oop(rcx_recv);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
// This is OK when all parameter types widen. // This is OK when all parameter types widen.
@@ -651,13 +646,13 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
  vmarg = __ argument_address(rax_argslot);
  // What class are we casting to?
- __ movptr(rbx_klass, rcx_amh_argument); // this is a Class object!
+ __ load_heap_oop(rbx_klass, rcx_amh_argument); // this is a Class object!
- __ movptr(rbx_klass, Address(rbx_klass, java_lang_Class::klass_offset_in_bytes()));
+ __ load_heap_oop(rbx_klass, Address(rbx_klass, java_lang_Class::klass_offset_in_bytes()));
  Label done;
  __ movptr(rdx_temp, vmarg);
  __ testptr(rdx_temp, rdx_temp);
- __ jccb(Assembler::zero, done); // no cast if null
+ __ jcc(Assembler::zero, done); // no cast if null
  __ load_klass(rdx_temp, rdx_temp);
  // live at this point:
@@ -672,14 +667,15 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
  __ movl(rax_argslot, rcx_amh_vmargslot); // reload argslot field
  __ movptr(rdx_temp, vmarg);
- __ pushptr(rcx_amh_argument); // required class
- __ push(rdx_temp); // bad object
- __ push((int)Bytecodes::_checkcast); // who is complaining?
+ __ load_heap_oop(rbx_klass, rcx_amh_argument); // required class
+ __ push(rbx_klass);
+ __ push(rdx_temp); // bad object
+ __ push((int)Bytecodes::_checkcast); // who is complaining?
  __ jump(ExternalAddress(from_interpreted_entry(_raise_exception)));
  __ bind(done);
  // get the new MH:
- __ movptr(rcx_recv, rcx_mh_vmtarget);
+ __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
  __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
}
break;
@ -741,7 +737,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
assert(CONV_VMINFO_SHIFT == 0, "preshifted"); assert(CONV_VMINFO_SHIFT == 0, "preshifted");
// get the new MH: // get the new MH:
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
// (now we are done with the old MH) // (now we are done with the old MH)
// original 32-bit vmdata word must be of this form: // original 32-bit vmdata word must be of this form:
@ -816,7 +812,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
ShouldNotReachHere(); ShouldNotReachHere();
} }
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
} }
break; break;
@ -858,7 +854,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
rax_argslot, rbx_temp, rdx_temp); rax_argslot, rbx_temp, rdx_temp);
} }
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
} }
break; break;
@ -969,7 +965,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
} }
} }
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
} }
break; break;
@ -1029,7 +1025,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
__ pop(rdi); // restore temp __ pop(rdi); // restore temp
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
} }
break; break;
@ -1052,7 +1048,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
__ pop(rdi); // restore temp __ pop(rdi); // restore temp
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
} }
break; break;
@ -1103,8 +1099,8 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
// Check the array type. // Check the array type.
Register rbx_klass = rbx_temp; Register rbx_klass = rbx_temp;
__ movptr(rbx_klass, rcx_amh_argument); // this is a Class object! __ load_heap_oop(rbx_klass, rcx_amh_argument); // this is a Class object!
__ movptr(rbx_klass, Address(rbx_klass, java_lang_Class::klass_offset_in_bytes())); __ load_heap_oop(rbx_klass, Address(rbx_klass, java_lang_Class::klass_offset_in_bytes()));
Label ok_array_klass, bad_array_klass, bad_array_length; Label ok_array_klass, bad_array_klass, bad_array_length;
__ check_klass_subtype(rdx_array_klass, rbx_klass, rdi, ok_array_klass); __ check_klass_subtype(rdx_array_klass, rbx_klass, rdi, ok_array_klass);
@ -1186,7 +1182,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
// Arguments are spread. Move to next method handle. // Arguments are spread. Move to next method handle.
UNPUSH_RSI_RDI; UNPUSH_RSI_RDI;
__ movptr(rcx_recv, rcx_mh_vmtarget); __ load_heap_oop(rcx_recv, rcx_mh_vmtarget);
__ jump_to_method_handle_entry(rcx_recv, rdx_temp); __ jump_to_method_handle_entry(rcx_recv, rdx_temp);
__ bind(bad_array_klass); __ bind(bad_array_klass);


@@ -35,7 +35,7 @@ enum platform_dependent_constants {
// MethodHandles adapters
enum method_handles_platform_dependent_constants {
- method_handles_adapters_code_size = 26000
+ method_handles_adapters_code_size = 40000
};
class x86 {


@@ -3111,19 +3111,22 @@ void TemplateTable::invokedynamic(int byte_no) {
  // rax: CallSite object (f1)
  // rbx: unused (f2)
  // rcx: receiver address
  // rdx: flags (unused)
+ Register rax_callsite = rax;
+ Register rcx_method_handle = rcx;
  if (ProfileInterpreter) {
    Label L;
    // %%% should make a type profile for any invokedynamic that takes a ref argument
    // profile this call
    __ profile_call(rsi);
  }
- __ movptr(rcx, Address(rax, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
+ __ movptr(rcx_method_handle, Address(rax_callsite, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
- __ null_check(rcx);
+ __ null_check(rcx_method_handle);
  __ prepare_to_jump_from_interpreted();
- __ jump_to_method_handle_entry(rcx, rdx);
+ __ jump_to_method_handle_entry(rcx_method_handle, rdx);
}
//---------------------------------------------------------------------------------------------------- //----------------------------------------------------------------------------------------------------


@@ -3120,17 +3120,19 @@ void TemplateTable::invokedynamic(int byte_no) {
  // rcx: receiver address
  // rdx: flags (unused)
+ Register rax_callsite = rax;
+ Register rcx_method_handle = rcx;
  if (ProfileInterpreter) {
    Label L;
    // %%% should make a type profile for any invokedynamic that takes a ref argument
    // profile this call
    __ profile_call(r13);
  }
- __ movptr(rcx, Address(rax, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
+ __ load_heap_oop(rcx_method_handle, Address(rax_callsite, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
- __ null_check(rcx);
+ __ null_check(rcx_method_handle);
  __ prepare_to_jump_from_interpreted();
- __ jump_to_method_handle_entry(rcx, rdx);
+ __ jump_to_method_handle_entry(rcx_method_handle, rdx);
}


@@ -7349,43 +7349,6 @@ instruct bytes_reverse_short(rRegI dst) %{
  ins_pipe( ialu_reg );
%}
instruct loadI_reversed(rRegI dst, memory src) %{
match(Set dst (ReverseBytesI (LoadI src)));
format %{ "bswap_movl $dst, $src" %}
opcode(0x8B, 0x0F, 0xC8); /* Opcode 8B 0F C8 */
ins_encode(REX_reg_mem(dst, src), OpcP, reg_mem(dst, src), REX_reg(dst), OpcS, opc3_reg(dst));
ins_pipe( ialu_reg_mem );
%}
instruct loadL_reversed(rRegL dst, memory src) %{
match(Set dst (ReverseBytesL (LoadL src)));
format %{ "bswap_movq $dst, $src" %}
opcode(0x8B, 0x0F, 0xC8); /* Opcode 8B 0F C8 */
ins_encode(REX_reg_mem_wide(dst, src), OpcP, reg_mem(dst, src), REX_reg_wide(dst), OpcS, opc3_reg(dst));
ins_pipe( ialu_reg_mem );
%}
instruct storeI_reversed(memory dst, rRegI src) %{
match(Set dst (StoreI dst (ReverseBytesI src)));
format %{ "movl_bswap $dst, $src" %}
opcode(0x0F, 0xC8, 0x89); /* Opcode 0F C8 89 */
ins_encode( REX_reg(src), OpcP, opc2_reg(src), REX_reg_mem(src, dst), OpcT, reg_mem(src, dst) );
ins_pipe( ialu_mem_reg );
%}
instruct storeL_reversed(memory dst, rRegL src) %{
match(Set dst (StoreL dst (ReverseBytesL src)));
format %{ "movq_bswap $dst, $src" %}
opcode(0x0F, 0xC8, 0x89); /* Opcode 0F C8 89 */
ins_encode( REX_reg_wide(src), OpcP, opc2_reg(src), REX_reg_mem_wide(src, dst), OpcT, reg_mem(src, dst) );
ins_pipe( ialu_mem_reg );
%}
//---------- Zeros Count Instructions ------------------------------------------ //---------- Zeros Count Instructions ------------------------------------------
instruct countLeadingZerosI(rRegI dst, rRegI src, rFlagsReg cr) %{ instruct countLeadingZerosI(rRegI dst, rRegI src, rFlagsReg cr) %{


@@ -92,15 +92,15 @@ class SignatureHandlerGenerator : public SignatureHandlerGeneratorBase {
 public:
  SignatureHandlerGenerator(methodHandle method, CodeBuffer* buffer)
-   : SignatureHandlerGeneratorBase(method, (ffi_cif *) buffer->code_end()),
+   : SignatureHandlerGeneratorBase(method, (ffi_cif *) buffer->insts_end()),
      _cb(buffer) {
-   _cb->set_code_end((address) (cif() + 1));
+   _cb->set_insts_end((address) (cif() + 1));
  }
 private:
  void push(intptr_t value) {
-   intptr_t *dst = (intptr_t *) _cb->code_end();
+   intptr_t *dst = (intptr_t *) _cb->insts_end();
-   _cb->set_code_end((address) (dst + 1));
+   _cb->set_insts_end((address) (dst + 1));
    *dst = value;
  }
};


@@ -168,8 +168,8 @@ class CodeSection VALUE_OBJ_CLASS_SPEC {
  bool allocates(address pc) const { return pc >= _start && pc < _limit; }
  bool allocates2(address pc) const { return pc >= _start && pc <= _limit; }
- void set_end(address pc) { assert(allocates2(pc),""); _end = pc; }
+ void set_end(address pc) { assert(allocates2(pc), err_msg("not in CodeBuffer memory: " PTR_FORMAT " <= " PTR_FORMAT " <= " PTR_FORMAT, _start, pc, _limit)); _end = pc; }
- void set_mark(address pc) { assert(contains2(pc),"not in codeBuffer");
+ void set_mark(address pc) { assert(contains2(pc), "not in codeBuffer");
                              _mark = pc; }
  void set_mark_off(int offset) { assert(contains2(offset+_start),"not in codeBuffer");
                              _mark = offset + _start; }
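The reworked set_end assert above uses err_msg so the failure message carries the actual pointer values. A small sketch of the same idiom follows; it assumes HotSpot's err_msg and PTR_FORMAT utilities exactly as they appear in the line above, and the helper name is made up for illustration:

  // Hypothetical helper, illustration only: assert with a formatted message.
  static void check_in_buffer(address start, address pc, address limit) {
    assert(start <= pc && pc <= limit,
           err_msg("not in CodeBuffer memory: " PTR_FORMAT " <= " PTR_FORMAT " <= " PTR_FORMAT,
                   start, pc, limit));
  }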


@ -174,31 +174,6 @@ void CFGPrinterOutput::print_state(BlockBegin* block) {
int index; int index;
Value value; Value value;
if (state->stack_size() > 0) {
print_begin("stack");
print("size %d", state->stack_size());
for_each_stack_value(state, index, value) {
ip.print_phi(index, value, block);
print_operand(value);
output()->cr();
}
print_end("stack");
}
if (state->locks_size() > 0) {
print_begin("locks");
print("size %d", state->locks_size());
for_each_lock_value(state, index, value) {
ip.print_phi(index, value, block);
print_operand(value);
output()->cr();
}
print_end("locks");
}
for_each_state(state) { for_each_state(state) {
print_begin("locals"); print_begin("locals");
print("size %d", state->locals_size()); print("size %d", state->locals_size());
@ -210,6 +185,33 @@ void CFGPrinterOutput::print_state(BlockBegin* block) {
output()->cr(); output()->cr();
} }
print_end("locals"); print_end("locals");
if (state->stack_size() > 0) {
print_begin("stack");
print("size %d", state->stack_size());
print("method \"%s\"", method_name(state->scope()->method()));
for_each_stack_value(state, index, value) {
ip.print_phi(index, value, block);
print_operand(value);
output()->cr();
}
print_end("stack");
}
if (state->locks_size() > 0) {
print_begin("locks");
print("size %d", state->locks_size());
print("method \"%s\"", method_name(state->scope()->method()));
for_each_lock_value(state, index, value) {
ip.print_phi(index, value, block);
print_operand(value);
output()->cr();
}
print_end("locks");
}
  }
  print_end("states");
@@ -230,7 +232,8 @@ void CFGPrinterOutput::print_HIR(Value instr) {
  if (instr->is_pinned()) {
    output()->put('.');
  }
- output()->print("%d %d ", instr->bci(), instr->use_count());
+ output()->print("%d %d ", instr->printable_bci(), instr->use_count());
  print_operand(instr);
@@ -271,7 +274,7 @@ void CFGPrinterOutput::print_block(BlockBegin* block) {
  print("name \"B%d\"", block->block_id());
  print("from_bci %d", block->bci());
- print("to_bci %d", (block->end() == NULL ? -1 : block->end()->bci()));
+ print("to_bci %d", (block->end() == NULL ? -1 : block->end()->printable_bci()));
  output()->indent();
  output()->print("predecessors ");


@@ -205,7 +205,7 @@ void Canonicalizer::do_StoreField (StoreField* x) {
      // limit this optimization to current block
      if (value != NULL && in_current_block(conv)) {
        set_canonical(new StoreField(x->obj(), x->offset(), x->field(), value, x->is_static(),
-                                    x->lock_stack(), x->state_before(), x->is_loaded(), x->is_initialized()));
+                                    x->state_before(), x->is_loaded(), x->is_initialized()));
        return;
      }
    }
@@ -256,7 +256,7 @@ void Canonicalizer::do_StoreIndexed (StoreIndexed* x) {
      // limit this optimization to current block
      if (value != NULL && in_current_block(conv)) {
        set_canonical(new StoreIndexed(x->array(), x->index(), x->length(),
-                                      x->elt_type(), value, x->lock_stack()));
+                                      x->elt_type(), value, x->state_before()));
        return;
      }
    }
@@ -667,7 +667,7 @@ void Canonicalizer::do_If(If* x) {
        }
      }
      set_canonical(canon);
-     set_bci(cmp->bci());
+     set_bci(cmp->state_before()->bci());
    }
  }
} else if (l->as_InstanceOf() != NULL) {
@@ -685,7 +685,7 @@ void Canonicalizer::do_If(If* x) {
    set_canonical(new Goto(is_inst_sux, x->state_before(), x->is_safepoint()));
  } else {
    // successors differ => simplify to: IfInstanceOf
-   set_canonical(new IfInstanceOf(inst->klass(), inst->obj(), true, inst->bci(), is_inst_sux, no_inst_sux));
+   set_canonical(new IfInstanceOf(inst->klass(), inst->obj(), true, inst->state_before()->bci(), is_inst_sux, no_inst_sux));
  }
}
} else if (rt == objectNull && (l->as_NewInstance() || l->as_NewArray())) {


@@ -22,7 +22,6 @@
 *
 */
-class BlockBegin;
class CompilationResourceObj;
class XHandlers;
class ExceptionInfo;

File diff suppressed because it is too large.


@ -58,9 +58,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
// BlockEnds. // BlockEnds.
BlockBegin* _continuation; BlockBegin* _continuation;
// Without return value of inlined method on stack
ValueStack* _continuation_state;
// Was this ScopeData created only for the parsing and inlining of // Was this ScopeData created only for the parsing and inlining of
// a jsr? // a jsr?
bool _parsing_jsr; bool _parsing_jsr;
@ -125,14 +122,10 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
void set_stream(ciBytecodeStream* stream) { _stream = stream; } void set_stream(ciBytecodeStream* stream) { _stream = stream; }
intx max_inline_size() const { return _max_inline_size; } intx max_inline_size() const { return _max_inline_size; }
int caller_stack_size() const;
BlockBegin* continuation() const { return _continuation; } BlockBegin* continuation() const { return _continuation; }
void set_continuation(BlockBegin* cont) { _continuation = cont; } void set_continuation(BlockBegin* cont) { _continuation = cont; }
ValueStack* continuation_state() const { return _continuation_state; }
void set_continuation_state(ValueStack* s) { _continuation_state = s; }
// Indicates whether this ScopeData was pushed only for the // Indicates whether this ScopeData was pushed only for the
// parsing and inlining of a jsr // parsing and inlining of a jsr
bool parsing_jsr() const { return _parsing_jsr; } bool parsing_jsr() const { return _parsing_jsr; }
@ -163,7 +156,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
// for all GraphBuilders // for all GraphBuilders
static bool _can_trap[Bytecodes::number_of_java_codes]; static bool _can_trap[Bytecodes::number_of_java_codes];
static bool _is_async[Bytecodes::number_of_java_codes];
// for each instance of GraphBuilder // for each instance of GraphBuilder
ScopeData* _scope_data; // Per-scope data; used for inlining ScopeData* _scope_data; // Per-scope data; used for inlining
@ -179,7 +171,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
// for each call to connect_to_end; can also be set by inliner // for each call to connect_to_end; can also be set by inliner
BlockBegin* _block; // the current block BlockBegin* _block; // the current block
ValueStack* _state; // the current execution state ValueStack* _state; // the current execution state
ValueStack* _exception_state; // state that will be used by handle_exception
Instruction* _last; // the last instruction added Instruction* _last; // the last instruction added
bool _skip_block; // skip processing of the rest of this block bool _skip_block; // skip processing of the rest of this block
@ -194,8 +185,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
ValueStack* state() const { return _state; } ValueStack* state() const { return _state; }
void set_state(ValueStack* state) { _state = state; } void set_state(ValueStack* state) { _state = state; }
IRScope* scope() const { return scope_data()->scope(); } IRScope* scope() const { return scope_data()->scope(); }
ValueStack* exception_state() const { return _exception_state; }
void set_exception_state(ValueStack* s) { _exception_state = s; }
ciMethod* method() const { return scope()->method(); } ciMethod* method() const { return scope()->method(); }
ciBytecodeStream* stream() const { return scope_data()->stream(); } ciBytecodeStream* stream() const { return scope_data()->stream(); }
Instruction* last() const { return _last; } Instruction* last() const { return _last; }
@@ -230,7 +219,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
  void load_indexed (BasicType type);
  void store_indexed(BasicType type);
  void stack_op(Bytecodes::Code code);
- void arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* lock_stack = NULL);
+ void arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before = NULL);
  void negate_op(ValueType* type);
  void shift_op(ValueType* type, Bytecodes::Code code);
  void logic_op(ValueType* type, Bytecodes::Code code);
@@ -267,12 +256,8 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
  Instruction* append_split(StateSplit* instr);
  // other helpers
- static bool is_async(Bytecodes::Code code) {
-   assert(0 <= code && code < Bytecodes::number_of_java_codes, "illegal bytecode");
-   return _is_async[code];
- }
  BlockBegin* block_at(int bci) { return scope_data()->block_at(bci); }
- XHandlers* handle_exception(int bci);
+ XHandlers* handle_exception(Instruction* instruction);
  void connect_to_end(BlockBegin* beg);
  void null_check(Value value);
  void eliminate_redundant_phis(BlockBegin* start);
@@ -283,7 +268,28 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
  void kill_all();
- ValueStack* lock_stack();
+ // use of state copy routines (try to minimize unnecessary state
// object allocations):
// - if the instruction unconditionally needs a full copy of the
// state (for patching for example), then use copy_state_before*
// - if the instruction needs a full copy of the state only for
// handler generation (Instruction::needs_exception_state() returns
// false) then use copy_state_exhandling*
// - if the instruction needs either a full copy of the state for
// handler generation and a least a minimal copy of the state (as
// returned by Instruction::exception_state()) for debug info
// generation (that is when Instruction::needs_exception_state()
// returns true) then use copy_state_for_exception*
ValueStack* copy_state_before_with_bci(int bci);
ValueStack* copy_state_before();
ValueStack* copy_state_exhandling_with_bci(int bci);
ValueStack* copy_state_exhandling();
ValueStack* copy_state_for_exception_with_bci(int bci);
ValueStack* copy_state_for_exception();
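A hedged illustration of how the three flavors above are meant to be chosen inside a bytecode handler; the handler below is hypothetical (it is not declared anywhere and is not taken from this changeset), and only sketches the decision rule spelled out in the comment:

  // Hypothetical GraphBuilder member, illustration only:
  void GraphBuilder::example_handler() {
    // unconditionally needs the full pre-execution state (e.g. field patching):
    ValueStack* patched_state   = copy_state_before();
    // can throw, but only exception handler generation needs the full copy:
    ValueStack* handler_state   = copy_state_exhandling();
    // can throw and debug info needs at least a minimal exception state:
    ValueStack* exception_state = copy_state_for_exception();
    // the chosen copy is then passed to the new instruction as its state_before
    (void) patched_state; (void) handler_state; (void) exception_state;
  }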
  //
  // Inlining support
@@ -292,9 +298,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
  // accessors
  bool parsing_jsr() const { return scope_data()->parsing_jsr(); }
  BlockBegin* continuation() const { return scope_data()->continuation(); }
- ValueStack* continuation_state() const { return scope_data()->continuation_state(); }
  BlockBegin* jsr_continuation() const { return scope_data()->jsr_continuation(); }
- int caller_stack_size() const { return scope_data()->caller_stack_size(); }
  void set_continuation(BlockBegin* continuation) { scope_data()->set_continuation(continuation); }
  void set_inline_cleanup_info(BlockBegin* block,
                               Instruction* return_prev,
@@ -343,7 +347,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
  NOT_PRODUCT(void print_inline_result(ciMethod* callee, bool res);)
  void profile_call(Value recv, ciKlass* predicted_holder);
- void profile_invocation(ciMethod* inlinee, ValueStack* state, int bci);
+ void profile_invocation(ciMethod* inlinee, ValueStack* state);
  // Shortcuts to profiling control.
  bool is_profiling() { return _compilation->is_profiling(); }


@ -116,24 +116,6 @@ bool XHandler::equals(XHandler* other) const {
// Implementation of IRScope // Implementation of IRScope
BlockBegin* IRScope::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) {
if (entry == NULL) return NULL;
assert(entry->is_set(f), "entry/flag mismatch");
// create header block
BlockBegin* h = new BlockBegin(entry->bci());
BlockEnd* g = new Goto(entry, false);
h->set_next(g, entry->bci());
h->set_end(g);
h->set(f);
// setup header block end state
ValueStack* s = state->copy(); // can use copy since stack is empty (=> no phis)
assert(s->stack_is_empty(), "must have empty stack at entry point");
g->set_state(s);
return h;
}
BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) { BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) {
GraphBuilder gm(compilation, this); GraphBuilder gm(compilation, this);
NOT_PRODUCT(if (PrintValueNumbering && Verbose) gm.print_stats()); NOT_PRODUCT(if (PrintValueNumbering && Verbose) gm.print_stats());
@ -145,12 +127,9 @@ BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) {
IRScope::IRScope(Compilation* compilation, IRScope* caller, int caller_bci, ciMethod* method, int osr_bci, bool create_graph) IRScope::IRScope(Compilation* compilation, IRScope* caller, int caller_bci, ciMethod* method, int osr_bci, bool create_graph)
: _callees(2) : _callees(2)
, _compilation(compilation) , _compilation(compilation)
, _lock_stack_size(-1)
, _requires_phi_function(method->max_locals()) , _requires_phi_function(method->max_locals())
{ {
_caller = caller; _caller = caller;
_caller_bci = caller == NULL ? -1 : caller_bci;
_caller_state = NULL; // Must be set later if needed
_level = caller == NULL ? 0 : caller->level() + 1; _level = caller == NULL ? 0 : caller->level() + 1;
_method = method; _method = method;
_xhandlers = new XHandlers(method); _xhandlers = new XHandlers(method);
@ -182,32 +161,6 @@ int IRScope::max_stack() const {
} }
void IRScope::compute_lock_stack_size() {
if (!InlineMethodsWithExceptionHandlers) {
_lock_stack_size = 0;
return;
}
// Figure out whether we have to preserve expression stack elements
// for parent scopes, and if so, how many
IRScope* cur_scope = this;
while (cur_scope != NULL && !cur_scope->xhandlers()->has_handlers()) {
cur_scope = cur_scope->caller();
}
_lock_stack_size = (cur_scope == NULL ? 0 :
(cur_scope->caller_state() == NULL ? 0 :
cur_scope->caller_state()->stack_size()));
}
int IRScope::top_scope_bci() const {
assert(!is_top_scope(), "no correct answer for top scope possible");
const IRScope* scope = this;
while (!scope->caller()->is_top_scope()) {
scope = scope->caller();
}
return scope->caller_bci();
}
bool IRScopeDebugInfo::should_reexecute() {
  ciMethod* cur_method = scope()->method();
  int cur_bci = bci();
@@ -222,37 +175,24 @@ bool IRScopeDebugInfo::should_reexecute() {
// Implementation of CodeEmitInfo
// Stack must be NON-null
-CodeEmitInfo::CodeEmitInfo(int bci, ValueStack* stack, XHandlers* exception_handlers)
+CodeEmitInfo::CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers)
  : _scope(stack->scope())
- , _bci(bci)
  , _scope_debug_info(NULL)
  , _oop_map(NULL)
  , _stack(stack)
  , _exception_handlers(exception_handlers)
- , _next(NULL)
- , _id(-1)
  , _is_method_handle_invoke(false) {
  assert(_stack != NULL, "must be non null");
- assert(_bci == SynchronizationEntryBCI || Bytecodes::is_defined(scope()->method()->java_code_at_bci(_bci)), "make sure bci points at a real bytecode");
}
-CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, bool lock_stack_only)
+CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack)
  : _scope(info->_scope)
  , _exception_handlers(NULL)
- , _bci(info->_bci)
  , _scope_debug_info(NULL)
  , _oop_map(NULL)
+ , _stack(stack == NULL ? info->_stack : stack)
  , _is_method_handle_invoke(info->_is_method_handle_invoke) {
if (lock_stack_only) {
if (info->_stack != NULL) {
_stack = info->_stack->copy_locks();
} else {
_stack = NULL;
}
} else {
_stack = info->_stack;
}
// deep copy of exception handlers // deep copy of exception handlers
if (info->_exception_handlers != NULL) { if (info->_exception_handlers != NULL) {
@ -273,8 +213,6 @@ void CodeEmitInfo::add_register_oop(LIR_Opr opr) {
assert(_oop_map != NULL, "oop map must already exist"); assert(_oop_map != NULL, "oop map must already exist");
assert(opr->is_single_cpu(), "should not call otherwise"); assert(opr->is_single_cpu(), "should not call otherwise");
int frame_size = frame_map()->framesize();
int arg_count = frame_map()->oop_map_arg_count();
VMReg name = frame_map()->regname(opr); VMReg name = frame_map()->regname(opr);
_oop_map->set_oop(name); _oop_map->set_oop(name);
} }
@@ -383,8 +321,7 @@ class UseCountComputer: public ValueVisitor, BlockClosure {
  void visit(Value* n) {
    // Local instructions and Phis for expression stack values at the
    // start of basic blocks are not added to the instruction list
-   if ((*n)->bci() == -99 && (*n)->as_Local() == NULL &&
-       (*n)->as_Phi() == NULL) {
+   if (!(*n)->is_linked()&& (*n)->can_be_linked()) {
      assert(false, "a node was not appended to the graph");
      Compilation::current()->bailout("a node was not appended to the graph");
    }
@@ -1338,7 +1275,7 @@ void SubstitutionResolver::block_do(BlockBegin* block) {
    // need to remove this instruction from the instruction stream
    if (n->subst() != n) {
      assert(last != NULL, "must have last");
-     last->set_next(n->next(), n->next()->bci());
+     last->set_next(n->next());
    } else {
      last = n;
    }

View File

@ -132,8 +132,6 @@ class IRScope: public CompilationResourceObj {
// hierarchy // hierarchy
Compilation* _compilation; // the current compilation Compilation* _compilation; // the current compilation
IRScope* _caller; // the caller scope, or NULL IRScope* _caller; // the caller scope, or NULL
int _caller_bci; // the caller bci of the corresponding (inlined) invoke, or < 0
ValueStack* _caller_state; // the caller state, or NULL
int _level; // the inlining level int _level; // the inlining level
ciMethod* _method; // the corresponding method ciMethod* _method; // the corresponding method
IRScopeList _callees; // the inlined method scopes IRScopeList _callees; // the inlined method scopes
@ -144,15 +142,9 @@ class IRScope: public CompilationResourceObj {
bool _monitor_pairing_ok; // the monitor pairing info bool _monitor_pairing_ok; // the monitor pairing info
BlockBegin* _start; // the start block, successsors are method entries BlockBegin* _start; // the start block, successsors are method entries
// lock stack management
int _lock_stack_size; // number of expression stack elements which, if present,
// must be spilled to the stack because of exception
// handling inside inlined methods
BitMap _requires_phi_function; // bit is set if phi functions at loop headers are necessary for a local variable BitMap _requires_phi_function; // bit is set if phi functions at loop headers are necessary for a local variable
// helper functions // helper functions
BlockBegin* header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state);
BlockBegin* build_graph(Compilation* compilation, int osr_bci); BlockBegin* build_graph(Compilation* compilation, int osr_bci);
public: public:
@ -162,33 +154,16 @@ class IRScope: public CompilationResourceObj {
// accessors // accessors
Compilation* compilation() const { return _compilation; } Compilation* compilation() const { return _compilation; }
IRScope* caller() const { return _caller; } IRScope* caller() const { return _caller; }
int caller_bci() const { return _caller_bci; }
ValueStack* caller_state() const { return _caller_state; }
int level() const { return _level; } int level() const { return _level; }
ciMethod* method() const { return _method; } ciMethod* method() const { return _method; }
int max_stack() const; // NOTE: expensive int max_stack() const; // NOTE: expensive
int lock_stack_size() const {
assert(_lock_stack_size != -1, "uninitialized");
return _lock_stack_size;
}
BitMap& requires_phi_function() { return _requires_phi_function; } BitMap& requires_phi_function() { return _requires_phi_function; }
// mutators
// Needed because caller state is not ready at time of IRScope construction
void set_caller_state(ValueStack* state) { _caller_state = state; }
// Needed because caller state changes after IRScope construction.
// Computes number of expression stack elements whose state must be
// preserved in the case of an exception; these may be seen by
// caller scopes. Zero when inlining of methods containing exception
// handlers is disabled, otherwise a conservative approximation.
void compute_lock_stack_size();
// hierarchy // hierarchy
bool is_top_scope() const { return _caller == NULL; } bool is_top_scope() const { return _caller == NULL; }
void add_callee(IRScope* callee) { _callees.append(callee); } void add_callee(IRScope* callee) { _callees.append(callee); }
int number_of_callees() const { return _callees.length(); } int number_of_callees() const { return _callees.length(); }
IRScope* callee_no(int i) const { return _callees.at(i); } IRScope* callee_no(int i) const { return _callees.at(i); }
int top_scope_bci() const;
// accessors, graph // accessors, graph
bool is_valid() const { return start() != NULL; } bool is_valid() const { return start() != NULL; }
@ -266,9 +241,6 @@ class CodeEmitInfo: public CompilationResourceObj {
XHandlers* _exception_handlers; XHandlers* _exception_handlers;
OopMap* _oop_map; OopMap* _oop_map;
ValueStack* _stack; // used by deoptimization (contains also monitors ValueStack* _stack; // used by deoptimization (contains also monitors
int _bci;
CodeEmitInfo* _next;
int _id;
bool _is_method_handle_invoke; // true if the associated call site is a MethodHandle call site. bool _is_method_handle_invoke; // true if the associated call site is a MethodHandle call site.
FrameMap* frame_map() const { return scope()->compilation()->frame_map(); } FrameMap* frame_map() const { return scope()->compilation()->frame_map(); }
@@ -277,23 +249,10 @@
 public:
  // use scope from ValueStack
- CodeEmitInfo(int bci, ValueStack* stack, XHandlers* exception_handlers);
+ CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers);
CodeEmitInfo(int bci, ValueStack* stack, XHandlers* exception_handlers); CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers);
// used by natives
CodeEmitInfo(IRScope* scope, int bci)
: _scope(scope)
, _bci(bci)
, _oop_map(NULL)
, _scope_debug_info(NULL)
, _stack(NULL)
, _exception_handlers(NULL)
, _next(NULL)
, _id(-1)
, _is_method_handle_invoke(false) {
}
  // make a copy
- CodeEmitInfo(CodeEmitInfo* info, bool lock_stack_only = false);
+ CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack = NULL);
  // accessors
  OopMap* oop_map() { return _oop_map; }
@ -301,17 +260,10 @@ class CodeEmitInfo: public CompilationResourceObj {
IRScope* scope() const { return _scope; } IRScope* scope() const { return _scope; }
XHandlers* exception_handlers() const { return _exception_handlers; } XHandlers* exception_handlers() const { return _exception_handlers; }
ValueStack* stack() const { return _stack; } ValueStack* stack() const { return _stack; }
int bci() const { return _bci; }
void add_register_oop(LIR_Opr opr); void add_register_oop(LIR_Opr opr);
void record_debug_info(DebugInformationRecorder* recorder, int pc_offset); void record_debug_info(DebugInformationRecorder* recorder, int pc_offset);
CodeEmitInfo* next() const { return _next; }
void set_next(CodeEmitInfo* next) { _next = next; }
int id() const { return _id; }
void set_id(int id) { _id = id; }
bool is_method_handle_invoke() const { return _is_method_handle_invoke; } bool is_method_handle_invoke() const { return _is_method_handle_invoke; }
void set_is_method_handle_invoke(bool x) { _is_method_handle_invoke = x; } void set_is_method_handle_invoke(bool x) { _is_method_handle_invoke = x; }
}; };


@ -29,13 +29,6 @@
// Implementation of Instruction // Implementation of Instruction
#ifdef ASSERT
void Instruction::create_hi_word() {
assert(type()->is_double_word() && _hi_word == NULL, "only double word has high word");
_hi_word = new HiWord(this);
}
#endif
Instruction::Condition Instruction::mirror(Condition cond) { Instruction::Condition Instruction::mirror(Condition cond) {
switch (cond) { switch (cond) {
case eql: return eql; case eql: return eql;
@ -63,6 +56,15 @@ Instruction::Condition Instruction::negate(Condition cond) {
return eql; return eql;
} }
void Instruction::update_exception_state(ValueStack* state) {
if (state != NULL && (state->kind() == ValueStack::EmptyExceptionState || state->kind() == ValueStack::ExceptionState)) {
assert(state->kind() == ValueStack::EmptyExceptionState || Compilation::current()->env()->jvmti_can_access_local_variables(), "unexpected state kind");
_exception_state = state;
} else {
_exception_state = NULL;
}
}
Instruction* Instruction::prev(BlockBegin* block) { Instruction* Instruction::prev(BlockBegin* block) {
Instruction* p = NULL; Instruction* p = NULL;
@ -75,7 +77,24 @@ Instruction* Instruction::prev(BlockBegin* block) {
} }
void Instruction::state_values_do(ValueVisitor* f) {
if (state_before() != NULL) {
state_before()->values_do(f);
}
if (exception_state() != NULL){
exception_state()->values_do(f);
}
}
#ifndef PRODUCT #ifndef PRODUCT
void Instruction::check_state(ValueStack* state) {
if (state != NULL) {
state->verify();
}
}
void Instruction::print() { void Instruction::print() {
InstructionPrinter ip; InstructionPrinter ip;
print(ip); print(ip);
@ -190,35 +209,6 @@ ciType* CheckCast::exact_type() const {
return NULL; return NULL;
} }
void ArithmeticOp::other_values_do(ValueVisitor* f) {
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
void NullCheck::other_values_do(ValueVisitor* f) {
lock_stack()->values_do(f);
}
void AccessArray::other_values_do(ValueVisitor* f) {
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
// Implementation of AccessField
void AccessField::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
// Implementation of StoreIndexed
IRScope* StoreIndexed::scope() const {
return lock_stack()->scope();
}
// Implementation of ArithmeticOp // Implementation of ArithmeticOp
bool ArithmeticOp::is_commutative() const { bool ArithmeticOp::is_commutative() const {
@ -266,13 +256,6 @@ bool LogicOp::is_commutative() const {
} }
// Implementation of CompareOp
void CompareOp::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of IfOp // Implementation of IfOp
bool IfOp::is_commutative() const { bool IfOp::is_commutative() const {
@ -301,6 +284,7 @@ IRScope* StateSplit::scope() const {
void StateSplit::state_values_do(ValueVisitor* f) { void StateSplit::state_values_do(ValueVisitor* f) {
Instruction::state_values_do(f);
if (state() != NULL) state()->values_do(f); if (state() != NULL) state()->values_do(f);
} }
@ -316,30 +300,17 @@ void BlockBegin::state_values_do(ValueVisitor* f) {
} }
void MonitorEnter::state_values_do(ValueVisitor* f) {
StateSplit::state_values_do(f);
_lock_stack_before->values_do(f);
}
void Intrinsic::state_values_do(ValueVisitor* f) {
StateSplit::state_values_do(f);
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
// Implementation of Invoke
Invoke::Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
               int vtable_index, ciMethod* target, ValueStack* state_before)
- : StateSplit(result_type)
+ : StateSplit(result_type, state_before)
  , _code(code)
  , _recv(recv)
  , _args(args)
  , _vtable_index(vtable_index)
  , _target(target)
- , _state_before(state_before)
{
  set_flag(TargetIsLoadedFlag, target->is_loaded());
  set_flag(TargetIsFinalFlag, target_is_loaded() && target->is_final_method());
@@ -376,7 +347,7 @@ void Invoke::state_values_do(ValueVisitor* f) {
// Implementation of Contant
intx Constant::hash() const {
- if (_state == NULL) {
+ if (state_before() == NULL) {
    switch (type()->tag()) {
    case intTag:
      return HASH2(name(), type()->as_IntConstant()->value());
@ -499,25 +470,6 @@ BlockBegin* Constant::compare(Instruction::Condition cond, Value right,
} }
void Constant::other_values_do(ValueVisitor* f) {
if (state() != NULL) state()->values_do(f);
}
// Implementation of NewArray
void NewArray::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of TypeCheck
void TypeCheck::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of BlockBegin
void BlockBegin::set_end(BlockEnd* end) {
@@ -604,23 +556,14 @@ void BlockBegin::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
// of the inserted block, without recomputing the values of the other blocks
// in the CFG. Therefore the value of "depth_first_number" in BlockBegin becomes meaningless.
BlockBegin* BlockBegin::insert_block_between(BlockBegin* sux) {
- // Try to make the bci close to a block with a single pred or sux,
- // since this make the block layout algorithm work better.
- int bci = -1;
- if (sux->number_of_preds() == 1) {
-   bci = sux->bci();
- } else {
-   bci = end()->bci();
- }
- BlockBegin* new_sux = new BlockBegin(bci);
+ BlockBegin* new_sux = new BlockBegin(-99);
  // mark this block (special treatment when block order is computed)
  new_sux->set(critical_edge_split_flag);
  // This goto is not a safepoint.
  Goto* e = new Goto(sux, false);
- new_sux->set_next(e, bci);
+ new_sux->set_next(e, end()->state()->bci());
  new_sux->set_end(e);
  // setup states
  ValueStack* s = end()->state();
@@ -763,7 +706,7 @@ bool BlockBegin::try_merge(ValueStack* new_state) {
  }
  // copy state because it is altered
- new_state = new_state->copy();
+ new_state = new_state->copy(ValueStack::BlockBeginState, bci());
  // Use method liveness to invalidate dead locals
  MethodLivenessResult liveness = new_state->scope()->method()->liveness_at_bci(bci());
@ -800,19 +743,9 @@ bool BlockBegin::try_merge(ValueStack* new_state) {
// initialize state of block // initialize state of block
set_state(new_state); set_state(new_state);
} else if (existing_state->is_same_across_scopes(new_state)) { } else if (existing_state->is_same(new_state)) {
TRACE_PHI(tty->print_cr("exisiting state found")); TRACE_PHI(tty->print_cr("exisiting state found"));
// Inlining may cause the local state not to match up, so walk up
// the new state until we get to the same scope as the
// existing and then start processing from there.
while (existing_state->scope() != new_state->scope()) {
new_state = new_state->caller_state();
assert(new_state != NULL, "could not match up scopes");
assert(false, "check if this is necessary");
}
assert(existing_state->scope() == new_state->scope(), "not matching"); assert(existing_state->scope() == new_state->scope(), "not matching");
assert(existing_state->locals_size() == new_state->locals_size(), "not matching"); assert(existing_state->locals_size() == new_state->locals_size(), "not matching");
assert(existing_state->stack_size() == new_state->stack_size(), "not matching"); assert(existing_state->stack_size() == new_state->stack_size(), "not matching");
@ -969,11 +902,6 @@ void BlockEnd::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
} }
void BlockEnd::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of Phi // Implementation of Phi
// Normal phi functions take their operands from the last instruction of the // Normal phi functions take their operands from the last instruction of the
@ -1006,11 +934,6 @@ int Phi::operand_count() const {
} }
// Implementation of Throw
void Throw::state_values_do(ValueVisitor* f) {
BlockEnd::state_values_do(f);
}
void ProfileInvoke::state_values_do(ValueVisitor* f) { void ProfileInvoke::state_values_do(ValueVisitor* f) {
if (state() != NULL) state()->values_do(f); if (state() != NULL) state()->values_do(f);
View File
@ -38,7 +38,6 @@ typedef LIR_OprDesc* LIR_Opr;
// serve factoring. // serve factoring.
class Instruction; class Instruction;
class HiWord;
class Phi; class Phi;
class Local; class Local;
class Constant; class Constant;
@ -149,7 +148,6 @@ class BlockList: public _BlockList {
class InstructionVisitor: public StackObj { class InstructionVisitor: public StackObj {
public: public:
void do_HiWord (HiWord* x) { ShouldNotReachHere(); }
virtual void do_Phi (Phi* x) = 0; virtual void do_Phi (Phi* x) = 0;
virtual void do_Local (Local* x) = 0; virtual void do_Local (Local* x) = 0;
virtual void do_Constant (Constant* x) = 0; virtual void do_Constant (Constant* x) = 0;
@ -272,7 +270,9 @@ class InstructionVisitor: public StackObj {
class Instruction: public CompilationResourceObj { class Instruction: public CompilationResourceObj {
private: private:
int _id; // the unique instruction id int _id; // the unique instruction id
int _bci; // the instruction bci #ifndef PRODUCT
int _printable_bci; // the bci of the instruction for printing
#endif
int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1
int _pin_state; // set of PinReason describing the reason for pinning int _pin_state; // set of PinReason describing the reason for pinning
ValueType* _type; // the instruction value type ValueType* _type; // the instruction value type
@ -281,17 +281,18 @@ class Instruction: public CompilationResourceObj {
LIR_Opr _operand; // LIR specific information LIR_Opr _operand; // LIR specific information
unsigned int _flags; // Flag bits unsigned int _flags; // Flag bits
ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
ValueStack* _exception_state; // Copy of state for exception handling
XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
#ifdef ASSERT
HiWord* _hi_word;
#endif
friend class UseCountComputer; friend class UseCountComputer;
friend class BlockBegin; friend class BlockBegin;
void update_exception_state(ValueStack* state);
bool has_printable_bci() const { return NOT_PRODUCT(_printable_bci != -99) PRODUCT_ONLY(false); }
protected: protected:
void set_bci(int bci) { assert(bci == SynchronizationEntryBCI || bci >= 0, "illegal bci"); _bci = bci; }
void set_type(ValueType* type) { void set_type(ValueType* type) {
assert(type != NULL, "type must exist"); assert(type != NULL, "type must exist");
_type = type; _type = type;
@ -325,6 +326,7 @@ class Instruction: public CompilationResourceObj {
NeedsPatchingFlag, NeedsPatchingFlag,
ThrowIncompatibleClassChangeErrorFlag, ThrowIncompatibleClassChangeErrorFlag,
ProfileMDOFlag, ProfileMDOFlag,
IsLinkedInBlockFlag,
InstructionLastFlag InstructionLastFlag
}; };
@ -356,31 +358,31 @@ class Instruction: public CompilationResourceObj {
} }
// creation // creation
Instruction(ValueType* type, bool type_is_constant = false, bool create_hi = true) Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false)
: _bci(-99) : _use_count(0)
, _use_count(0) #ifndef PRODUCT
, _printable_bci(-99)
#endif
, _pin_state(0) , _pin_state(0)
, _type(type) , _type(type)
, _next(NULL) , _next(NULL)
, _subst(NULL) , _subst(NULL)
, _flags(0) , _flags(0)
, _operand(LIR_OprFact::illegalOpr) , _operand(LIR_OprFact::illegalOpr)
, _state_before(state_before)
, _exception_handlers(NULL) , _exception_handlers(NULL)
#ifdef ASSERT
, _hi_word(NULL)
#endif
{ {
check_state(state_before);
assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
#ifdef ASSERT update_exception_state(_state_before);
if (create_hi && type->is_double_word()) {
create_hi_word();
}
#endif
} }
// accessors // accessors
int id() const { return _id; } int id() const { return _id; }
int bci() const { return _bci; } #ifndef PRODUCT
int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) }
#endif
int use_count() const { return _use_count; } int use_count() const { return _use_count; }
int pin_state() const { return _pin_state; } int pin_state() const { return _pin_state; }
bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
@ -393,9 +395,13 @@ class Instruction: public CompilationResourceObj {
void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
bool has_uses() const { return use_count() > 0; } bool has_uses() const { return use_count() > 0; }
bool is_root() const { return is_pinned() || use_count() > 1; } ValueStack* state_before() const { return _state_before; }
ValueStack* exception_state() const { return _exception_state; }
virtual bool needs_exception_state() const { return true; }
XHandlers* exception_handlers() const { return _exception_handlers; } XHandlers* exception_handlers() const { return _exception_handlers; }
// manipulation // manipulation
@ -403,19 +409,25 @@ class Instruction: public CompilationResourceObj {
void pin() { _pin_state |= PinUnknown; } void pin() { _pin_state |= PinUnknown; }
// DANGEROUS: only used by EliminateStores // DANGEROUS: only used by EliminateStores
void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
virtual void set_lock_stack(ValueStack* l) { /* do nothing*/ }
virtual ValueStack* lock_stack() const { return NULL; }
Instruction* set_next(Instruction* next, int bci) { Instruction* set_next(Instruction* next) {
if (next != NULL) { assert(next->has_printable_bci(), "_printable_bci should have been set");
assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); assert(next != NULL, "must not be NULL");
assert(next->as_Phi() == NULL && next->as_Local() == NULL, "shouldn't link these instructions into list"); assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
next->set_bci(bci); assert(next->can_be_linked(), "shouldn't link these instructions into list");
}
next->set_flag(Instruction::IsLinkedInBlockFlag, true);
_next = next; _next = next;
return next; return next;
} }
Instruction* set_next(Instruction* next, int bci) {
#ifndef PRODUCT
next->set_printable_bci(bci);
#endif
return set_next(next);
}
void set_subst(Instruction* subst) { void set_subst(Instruction* subst) {
assert(subst == NULL || assert(subst == NULL ||
type()->base() == subst->type()->base() || type()->base() == subst->type()->base() ||
@ -423,14 +435,7 @@ class Instruction: public CompilationResourceObj {
_subst = subst; _subst = subst;
} }
void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
#ifdef ASSERT
// HiWord is used for debugging and is allocated early to avoid
// allocation at inconvenient points
HiWord* hi_word() { return _hi_word; }
void create_hi_word();
#endif
// machine-specifics // machine-specifics
void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
@ -438,7 +443,6 @@ class Instruction: public CompilationResourceObj {
// generic // generic
virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
virtual HiWord* as_HiWord() { return NULL; }
virtual Phi* as_Phi() { return NULL; } virtual Phi* as_Phi() { return NULL; }
virtual Local* as_Local() { return NULL; } virtual Local* as_Local() { return NULL; }
virtual Constant* as_Constant() { return NULL; } virtual Constant* as_Constant() { return NULL; }
@ -493,7 +497,7 @@ class Instruction: public CompilationResourceObj {
virtual bool can_trap() const { return false; } virtual bool can_trap() const { return false; }
virtual void input_values_do(ValueVisitor* f) = 0; virtual void input_values_do(ValueVisitor* f) = 0;
virtual void state_values_do(ValueVisitor* f) { /* usually no state - override on demand */ } virtual void state_values_do(ValueVisitor* f);
virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
@ -505,6 +509,7 @@ class Instruction: public CompilationResourceObj {
HASHING1(Instruction, false, id()) // hashing disabled by default HASHING1(Instruction, false, id()) // hashing disabled by default
// debugging // debugging
static void check_state(ValueStack* state) PRODUCT_RETURN;
void print() PRODUCT_RETURN; void print() PRODUCT_RETURN;
void print_line() PRODUCT_RETURN; void print_line() PRODUCT_RETURN;
void print(InstructionPrinter& ip) PRODUCT_RETURN; void print(InstructionPrinter& ip) PRODUCT_RETURN;
@ -541,40 +546,6 @@ class AssertValues: public ValueVisitor {
#endif // ASSERT #endif // ASSERT
// A HiWord occupies the 'high word' of a 2-word
// expression stack entry. Hi & lo words must be
// paired on the expression stack (otherwise the
// bytecode sequence is illegal). Note that 'hi'
// refers to the IR expression stack format and
// does *not* imply a machine word ordering. No
// HiWords are used in optimized mode for speed,
// but NULL pointers are used instead.
LEAF(HiWord, Instruction)
private:
Value _lo_word;
public:
// creation
HiWord(Value lo_word)
: Instruction(illegalType, false, false),
_lo_word(lo_word) {
// hi-words are also allowed for illegal lo-words
assert(lo_word->type()->is_double_word() || lo_word->type()->is_illegal(),
"HiWord must be used for 2-word values only");
}
// accessors
Value lo_word() const { return _lo_word->subst(); }
// for invalidating of HiWords
void make_illegal() { set_type(illegalType); }
// generic
virtual void input_values_do(ValueVisitor* f) { ShouldNotReachHere(); }
};
// A Phi is a phi function in the sense of SSA form. It stands for // A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block. // the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch. // A Phi consists of n operands, one for every incoming branch.
@ -656,31 +627,25 @@ LEAF(Local, Instruction)
LEAF(Constant, Instruction) LEAF(Constant, Instruction)
ValueStack* _state;
public: public:
// creation // creation
Constant(ValueType* type): Constant(ValueType* type):
Instruction(type, true) Instruction(type, NULL, true)
, _state(NULL) { {
assert(type->is_constant(), "must be a constant"); assert(type->is_constant(), "must be a constant");
} }
Constant(ValueType* type, ValueStack* state): Constant(ValueType* type, ValueStack* state_before):
Instruction(type, true) Instruction(type, state_before, true)
, _state(state) { {
assert(state != NULL, "only used for constants which need patching"); assert(state_before != NULL, "only used for constants which need patching");
assert(type->is_constant(), "must be a constant"); assert(type->is_constant(), "must be a constant");
// since it's patching it needs to be pinned // since it's patching it needs to be pinned
pin(); pin();
} }
ValueStack* state() const { return _state; } virtual bool can_trap() const { return state_before() != NULL; }
// generic
virtual bool can_trap() const { return state() != NULL; }
virtual void input_values_do(ValueVisitor* f) { /* no values */ } virtual void input_values_do(ValueVisitor* f) { /* no values */ }
virtual void other_values_do(ValueVisitor* f);
virtual intx hash() const; virtual intx hash() const;
virtual bool is_equal(Value v) const; virtual bool is_equal(Value v) const;
@ -695,20 +660,16 @@ BASE(AccessField, Instruction)
Value _obj; Value _obj;
int _offset; int _offset;
ciField* _field; ciField* _field;
ValueStack* _state_before; // state is set only for unloaded or uninitialized fields
ValueStack* _lock_stack; // contains lock and scope information
NullCheck* _explicit_null_check; // For explicit null check elimination NullCheck* _explicit_null_check; // For explicit null check elimination
public: public:
// creation // creation
AccessField(Value obj, int offset, ciField* field, bool is_static, ValueStack* lock_stack, AccessField(Value obj, int offset, ciField* field, bool is_static,
ValueStack* state_before, bool is_loaded, bool is_initialized) ValueStack* state_before, bool is_loaded, bool is_initialized)
: Instruction(as_ValueType(field->type()->basic_type())) : Instruction(as_ValueType(field->type()->basic_type()), state_before)
, _obj(obj) , _obj(obj)
, _offset(offset) , _offset(offset)
, _field(field) , _field(field)
, _lock_stack(lock_stack)
, _state_before(state_before)
, _explicit_null_check(NULL) , _explicit_null_check(NULL)
{ {
set_needs_null_check(!is_static); set_needs_null_check(!is_static);
@ -734,13 +695,11 @@ BASE(AccessField, Instruction)
bool is_static() const { return check_flag(IsStaticFlag); } bool is_static() const { return check_flag(IsStaticFlag); }
bool is_loaded() const { return check_flag(IsLoadedFlag); } bool is_loaded() const { return check_flag(IsLoadedFlag); }
bool is_initialized() const { return check_flag(IsInitializedFlag); } bool is_initialized() const { return check_flag(IsInitializedFlag); }
ValueStack* state_before() const { return _state_before; }
ValueStack* lock_stack() const { return _lock_stack; }
NullCheck* explicit_null_check() const { return _explicit_null_check; } NullCheck* explicit_null_check() const { return _explicit_null_check; }
bool needs_patching() const { return check_flag(NeedsPatchingFlag); } bool needs_patching() const { return check_flag(NeedsPatchingFlag); }
// manipulation // manipulation
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
// Under certain circumstances, if a previous NullCheck instruction // Under certain circumstances, if a previous NullCheck instruction
// proved the target object non-null, we can eliminate the explicit // proved the target object non-null, we can eliminate the explicit
// null check and do an implicit one, simply specifying the debug // null check and do an implicit one, simply specifying the debug
@ -751,16 +710,15 @@ BASE(AccessField, Instruction)
// generic // generic
virtual bool can_trap() const { return needs_null_check() || needs_patching(); } virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);
}; };
LEAF(LoadField, AccessField) LEAF(LoadField, AccessField)
public: public:
// creation // creation
LoadField(Value obj, int offset, ciField* field, bool is_static, ValueStack* lock_stack, LoadField(Value obj, int offset, ciField* field, bool is_static,
ValueStack* state_before, bool is_loaded, bool is_initialized) ValueStack* state_before, bool is_loaded, bool is_initialized)
: AccessField(obj, offset, field, is_static, lock_stack, state_before, is_loaded, is_initialized) : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
{} {}
ciType* declared_type() const; ciType* declared_type() const;
@ -777,9 +735,9 @@ LEAF(StoreField, AccessField)
public: public:
// creation // creation
StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, ValueStack* lock_stack, StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
ValueStack* state_before, bool is_loaded, bool is_initialized) ValueStack* state_before, bool is_loaded, bool is_initialized)
: AccessField(obj, offset, field, is_static, lock_stack, state_before, is_loaded, is_initialized) : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
, _value(value) , _value(value)
{ {
set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
@ -799,29 +757,23 @@ LEAF(StoreField, AccessField)
BASE(AccessArray, Instruction) BASE(AccessArray, Instruction)
private: private:
Value _array; Value _array;
ValueStack* _lock_stack;
public: public:
// creation // creation
AccessArray(ValueType* type, Value array, ValueStack* lock_stack) AccessArray(ValueType* type, Value array, ValueStack* state_before)
: Instruction(type) : Instruction(type, state_before)
, _array(array) , _array(array)
, _lock_stack(lock_stack) { {
set_needs_null_check(true); set_needs_null_check(true);
ASSERT_VALUES ASSERT_VALUES
pin(); // instruction with side effect (null exception or range check throwing) pin(); // instruction with side effect (null exception or range check throwing)
} }
Value array() const { return _array; } Value array() const { return _array; }
ValueStack* lock_stack() const { return _lock_stack; }
// setters
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
// generic // generic
virtual bool can_trap() const { return needs_null_check(); } virtual bool can_trap() const { return needs_null_check(); }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); }
virtual void other_values_do(ValueVisitor* f);
}; };
@ -831,8 +783,8 @@ LEAF(ArrayLength, AccessArray)
public: public:
// creation // creation
ArrayLength(Value array, ValueStack* lock_stack) ArrayLength(Value array, ValueStack* state_before)
: AccessArray(intType, array, lock_stack) : AccessArray(intType, array, state_before)
, _explicit_null_check(NULL) {} , _explicit_null_check(NULL) {}
// accessors // accessors
@ -855,8 +807,8 @@ BASE(AccessIndexed, AccessArray)
public: public:
// creation // creation
AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* lock_stack) AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
: AccessArray(as_ValueType(elt_type), array, lock_stack) : AccessArray(as_ValueType(elt_type), array, state_before)
, _index(index) , _index(index)
, _length(length) , _length(length)
, _elt_type(elt_type) , _elt_type(elt_type)
@ -883,8 +835,8 @@ LEAF(LoadIndexed, AccessIndexed)
public: public:
// creation // creation
LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* lock_stack) LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
: AccessIndexed(array, index, length, elt_type, lock_stack) : AccessIndexed(array, index, length, elt_type, state_before)
, _explicit_null_check(NULL) {} , _explicit_null_check(NULL) {}
// accessors // accessors
@ -910,8 +862,8 @@ LEAF(StoreIndexed, AccessIndexed)
int _profiled_bci; int _profiled_bci;
public: public:
// creation // creation
StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* lock_stack) StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
: AccessIndexed(array, index, length, elt_type, lock_stack) : AccessIndexed(array, index, length, elt_type, state_before)
, _value(value), _profiled_method(NULL), _profiled_bci(0) , _value(value), _profiled_method(NULL), _profiled_bci(0)
{ {
set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
@ -922,7 +874,6 @@ LEAF(StoreIndexed, AccessIndexed)
// accessors // accessors
Value value() const { return _value; } Value value() const { return _value; }
IRScope* scope() const; // the state's scope
bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }
bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); }
// Helpers for methodDataOop profiling // Helpers for methodDataOop profiling
@ -963,7 +914,12 @@ BASE(Op2, Instruction)
public: public:
// creation // creation
Op2(ValueType* type, Bytecodes::Code op, Value x, Value y) : Instruction(type), _op(op), _x(x), _y(y) { Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
: Instruction(type, state_before)
, _op(op)
, _x(x)
, _y(y)
{
ASSERT_VALUES ASSERT_VALUES
} }
@ -985,28 +941,21 @@ BASE(Op2, Instruction)
LEAF(ArithmeticOp, Op2) LEAF(ArithmeticOp, Op2)
private:
ValueStack* _lock_stack; // used only for division operations
public: public:
// creation // creation
ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* lock_stack) ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
: Op2(x->type()->meet(y->type()), op, x, y) : Op2(x->type()->meet(y->type()), op, x, y, state_before)
, _lock_stack(lock_stack) { {
set_flag(IsStrictfpFlag, is_strictfp); set_flag(IsStrictfpFlag, is_strictfp);
if (can_trap()) pin(); if (can_trap()) pin();
} }
// accessors // accessors
ValueStack* lock_stack() const { return _lock_stack; }
bool is_strictfp() const { return check_flag(IsStrictfpFlag); } bool is_strictfp() const { return check_flag(IsStrictfpFlag); }
// setters
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
// generic // generic
virtual bool is_commutative() const; virtual bool is_commutative() const;
virtual bool can_trap() const; virtual bool can_trap() const;
virtual void other_values_do(ValueVisitor* f);
HASHING3(Op2, true, op(), x()->subst(), y()->subst()) HASHING3(Op2, true, op(), x()->subst(), y()->subst())
}; };
@ -1033,21 +982,14 @@ LEAF(LogicOp, Op2)
LEAF(CompareOp, Op2) LEAF(CompareOp, Op2)
private:
ValueStack* _state_before; // for deoptimization, when canonicalizing
public: public:
// creation // creation
CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
: Op2(intType, op, x, y) : Op2(intType, op, x, y, state_before)
, _state_before(state_before)
{} {}
// accessors
ValueStack* state_before() const { return _state_before; }
// generic // generic
HASHING3(Op2, true, op(), x()->subst(), y()->subst()) HASHING3(Op2, true, op(), x()->subst(), y()->subst())
virtual void other_values_do(ValueVisitor* f);
}; };
@ -1103,11 +1045,13 @@ LEAF(Convert, Instruction)
LEAF(NullCheck, Instruction) LEAF(NullCheck, Instruction)
private: private:
Value _obj; Value _obj;
ValueStack* _lock_stack;
public: public:
// creation // creation
NullCheck(Value obj, ValueStack* lock_stack) : Instruction(obj->type()->base()), _obj(obj), _lock_stack(lock_stack) { NullCheck(Value obj, ValueStack* state_before)
: Instruction(obj->type()->base(), state_before)
, _obj(obj)
{
ASSERT_VALUES ASSERT_VALUES
set_can_trap(true); set_can_trap(true);
assert(_obj->type()->is_object(), "null check must be applied to objects only"); assert(_obj->type()->is_object(), "null check must be applied to objects only");
@ -1116,16 +1060,13 @@ LEAF(NullCheck, Instruction)
// accessors // accessors
Value obj() const { return _obj; } Value obj() const { return _obj; }
ValueStack* lock_stack() const { return _lock_stack; }
// setters // setters
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }
// generic // generic
virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);
HASHING1(NullCheck, true, obj()->subst()) HASHING1(NullCheck, true, obj()->subst())
}; };
@ -1139,7 +1080,10 @@ BASE(StateSplit, Instruction)
public: public:
// creation // creation
StateSplit(ValueType* type) : Instruction(type), _state(NULL) { StateSplit(ValueType* type, ValueStack* state_before = NULL)
: Instruction(type, state_before)
, _state(NULL)
{
pin(PinStateSplitConstructor); pin(PinStateSplitConstructor);
} }
@ -1148,7 +1092,7 @@ BASE(StateSplit, Instruction)
IRScope* scope() const; // the state's scope IRScope* scope() const; // the state's scope
// manipulation // manipulation
void set_state(ValueStack* state) { _state = state; } void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }
// generic // generic
virtual void input_values_do(ValueVisitor* f) { /* no values */ } virtual void input_values_do(ValueVisitor* f) { /* no values */ }
@ -1164,7 +1108,6 @@ LEAF(Invoke, StateSplit)
BasicTypeList* _signature; BasicTypeList* _signature;
int _vtable_index; int _vtable_index;
ciMethod* _target; ciMethod* _target;
ValueStack* _state_before; // Required for deoptimization.
public: public:
// creation // creation
@ -1180,7 +1123,6 @@ LEAF(Invoke, StateSplit)
int vtable_index() const { return _vtable_index; } int vtable_index() const { return _vtable_index; }
BasicTypeList* signature() const { return _signature; } BasicTypeList* signature() const { return _signature; }
ciMethod* target() const { return _target; } ciMethod* target() const { return _target; }
ValueStack* state_before() const { return _state_before; }
// Returns false if target is not loaded // Returns false if target is not loaded
bool target_is_final() const { return check_flag(TargetIsFinalFlag); } bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
@ -1191,6 +1133,8 @@ LEAF(Invoke, StateSplit)
// JSR 292 support // JSR 292 support
bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { virtual void input_values_do(ValueVisitor* f) {
@ -1208,11 +1152,16 @@ LEAF(NewInstance, StateSplit)
public: public:
// creation // creation
NewInstance(ciInstanceKlass* klass) : StateSplit(instanceType), _klass(klass) {} NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
: StateSplit(instanceType, state_before)
, _klass(klass)
{}
// accessors // accessors
ciInstanceKlass* klass() const { return _klass; } ciInstanceKlass* klass() const { return _klass; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
ciType* exact_type() const; ciType* exact_type() const;
@ -1222,22 +1171,24 @@ LEAF(NewInstance, StateSplit)
BASE(NewArray, StateSplit) BASE(NewArray, StateSplit)
private: private:
Value _length; Value _length;
ValueStack* _state_before;
public: public:
// creation // creation
NewArray(Value length, ValueStack* state_before) : StateSplit(objectType), _length(length), _state_before(state_before) { NewArray(Value length, ValueStack* state_before)
: StateSplit(objectType, state_before)
, _length(length)
{
// Do not ASSERT_VALUES since length is NULL for NewMultiArray // Do not ASSERT_VALUES since length is NULL for NewMultiArray
} }
// accessors // accessors
ValueStack* state_before() const { return _state_before; }
Value length() const { return _length; } Value length() const { return _length; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
virtual void other_values_do(ValueVisitor* f);
}; };
@ -1247,7 +1198,10 @@ LEAF(NewTypeArray, NewArray)
public: public:
// creation // creation
NewTypeArray(Value length, BasicType elt_type) : NewArray(length, NULL), _elt_type(elt_type) {} NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
: NewArray(length, state_before)
, _elt_type(elt_type)
{}
// accessors // accessors
BasicType elt_type() const { return _elt_type; } BasicType elt_type() const { return _elt_type; }
@ -1303,7 +1257,6 @@ BASE(TypeCheck, StateSplit)
private: private:
ciKlass* _klass; ciKlass* _klass;
Value _obj; Value _obj;
ValueStack* _state_before;
ciMethod* _profiled_method; ciMethod* _profiled_method;
int _profiled_bci; int _profiled_bci;
@ -1311,14 +1264,13 @@ BASE(TypeCheck, StateSplit)
public: public:
// creation // creation
TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
: StateSplit(type), _klass(klass), _obj(obj), _state_before(state_before), : StateSplit(type, state_before), _klass(klass), _obj(obj),
_profiled_method(NULL), _profiled_bci(0) { _profiled_method(NULL), _profiled_bci(0) {
ASSERT_VALUES ASSERT_VALUES
set_direct_compare(false); set_direct_compare(false);
} }
// accessors // accessors
ValueStack* state_before() const { return _state_before; }
ciKlass* klass() const { return _klass; } ciKlass* klass() const { return _klass; }
Value obj() const { return _obj; } Value obj() const { return _obj; }
bool is_loaded() const { return klass() != NULL; } bool is_loaded() const { return klass() != NULL; }
@ -1330,7 +1282,6 @@ BASE(TypeCheck, StateSplit)
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);
// Helpers for methodDataOop profiling // Helpers for methodDataOop profiling
void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
@ -1364,6 +1315,8 @@ LEAF(InstanceOf, TypeCheck)
public: public:
// creation // creation
InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}
virtual bool needs_exception_state() const { return false; }
}; };
@ -1374,8 +1327,8 @@ BASE(AccessMonitor, StateSplit)
public: public:
// creation // creation
AccessMonitor(Value obj, int monitor_no) AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
: StateSplit(illegalType) : StateSplit(illegalType, state_before)
, _obj(obj) , _obj(obj)
, _monitor_no(monitor_no) , _monitor_no(monitor_no)
{ {
@ -1393,22 +1346,14 @@ BASE(AccessMonitor, StateSplit)
LEAF(MonitorEnter, AccessMonitor) LEAF(MonitorEnter, AccessMonitor)
private:
ValueStack* _lock_stack_before;
public: public:
// creation // creation
MonitorEnter(Value obj, int monitor_no, ValueStack* lock_stack_before) MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
: AccessMonitor(obj, monitor_no) : AccessMonitor(obj, monitor_no, state_before)
, _lock_stack_before(lock_stack_before)
{ {
ASSERT_VALUES ASSERT_VALUES
} }
// accessors
ValueStack* lock_stack_before() const { return _lock_stack_before; }
virtual void state_values_do(ValueVisitor* f);
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
}; };
@ -1417,7 +1362,11 @@ LEAF(MonitorEnter, AccessMonitor)
LEAF(MonitorExit, AccessMonitor) LEAF(MonitorExit, AccessMonitor)
public: public:
// creation // creation
MonitorExit(Value obj, int monitor_no) : AccessMonitor(obj, monitor_no) {} MonitorExit(Value obj, int monitor_no)
: AccessMonitor(obj, monitor_no, NULL)
{
ASSERT_VALUES
}
}; };
@ -1425,7 +1374,6 @@ LEAF(Intrinsic, StateSplit)
private: private:
vmIntrinsics::ID _id; vmIntrinsics::ID _id;
Values* _args; Values* _args;
ValueStack* _lock_stack;
Value _recv; Value _recv;
public: public:
@ -1440,13 +1388,12 @@ LEAF(Intrinsic, StateSplit)
vmIntrinsics::ID id, vmIntrinsics::ID id,
Values* args, Values* args,
bool has_receiver, bool has_receiver,
ValueStack* lock_stack, ValueStack* state_before,
bool preserves_state, bool preserves_state,
bool cantrap = true) bool cantrap = true)
: StateSplit(type) : StateSplit(type, state_before)
, _id(id) , _id(id)
, _args(args) , _args(args)
, _lock_stack(lock_stack)
, _recv(NULL) , _recv(NULL)
{ {
assert(args != NULL, "args must exist"); assert(args != NULL, "args must exist");
@ -1468,7 +1415,6 @@ LEAF(Intrinsic, StateSplit)
vmIntrinsics::ID id() const { return _id; } vmIntrinsics::ID id() const { return _id; }
int number_of_arguments() const { return _args->length(); } int number_of_arguments() const { return _args->length(); }
Value argument_at(int i) const { return _args->at(i); } Value argument_at(int i) const { return _args->at(i); }
ValueStack* lock_stack() const { return _lock_stack; }
bool has_receiver() const { return (_recv != NULL); } bool has_receiver() const { return (_recv != NULL); }
Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
@ -1480,8 +1426,6 @@ LEAF(Intrinsic, StateSplit)
StateSplit::input_values_do(f); StateSplit::input_values_do(f);
for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
} }
virtual void state_values_do(ValueVisitor* f);
}; };
@ -1490,6 +1434,7 @@ class LIR_List;
LEAF(BlockBegin, StateSplit) LEAF(BlockBegin, StateSplit)
private: private:
int _block_id; // the unique block id int _block_id; // the unique block id
int _bci; // start-bci of block
int _depth_first_number; // number of this block in a depth-first ordering int _depth_first_number; // number of this block in a depth-first ordering
int _linear_scan_number; // number of this block in linear-scan ordering int _linear_scan_number; // number of this block in linear-scan ordering
int _loop_depth; // the loop nesting level of this block int _loop_depth; // the loop nesting level of this block
@ -1546,6 +1491,7 @@ LEAF(BlockBegin, StateSplit)
// creation // creation
BlockBegin(int bci) BlockBegin(int bci)
: StateSplit(illegalType) : StateSplit(illegalType)
, _bci(bci)
, _depth_first_number(-1) , _depth_first_number(-1)
, _linear_scan_number(-1) , _linear_scan_number(-1)
, _loop_depth(0) , _loop_depth(0)
@ -1570,11 +1516,14 @@ LEAF(BlockBegin, StateSplit)
, _total_preds(0) , _total_preds(0)
, _stores_to_locals() , _stores_to_locals()
{ {
set_bci(bci); #ifndef PRODUCT
set_printable_bci(bci);
#endif
} }
// accessors // accessors
int block_id() const { return _block_id; } int block_id() const { return _block_id; }
int bci() const { return _bci; }
BlockList* successors() { return &_successors; } BlockList* successors() { return &_successors; }
BlockBegin* dominator() const { return _dominator; } BlockBegin* dominator() const { return _dominator; }
int loop_depth() const { return _loop_depth; } int loop_depth() const { return _loop_depth; }
@ -1596,7 +1545,6 @@ LEAF(BlockBegin, StateSplit)
BitMap& stores_to_locals() { return _stores_to_locals; } BitMap& stores_to_locals() { return _stores_to_locals; }
// manipulation // manipulation
void set_bci(int bci) { Instruction::set_bci(bci); }
void set_dominator(BlockBegin* dom) { _dominator = dom; } void set_dominator(BlockBegin* dom) { _dominator = dom; }
void set_loop_depth(int d) { _loop_depth = d; } void set_loop_depth(int d) { _loop_depth = d; }
void set_depth_first_number(int dfn) { _depth_first_number = dfn; } void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
@ -1694,7 +1642,6 @@ BASE(BlockEnd, StateSplit)
private: private:
BlockBegin* _begin; BlockBegin* _begin;
BlockList* _sux; BlockList* _sux;
ValueStack* _state_before;
protected: protected:
BlockList* sux() const { return _sux; } BlockList* sux() const { return _sux; }
@ -1710,24 +1657,20 @@ BASE(BlockEnd, StateSplit)
public: public:
// creation // creation
BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
: StateSplit(type) : StateSplit(type, state_before)
, _begin(NULL) , _begin(NULL)
, _sux(NULL) , _sux(NULL)
, _state_before(state_before) { {
set_flag(IsSafepointFlag, is_safepoint); set_flag(IsSafepointFlag, is_safepoint);
} }
// accessors // accessors
ValueStack* state_before() const { return _state_before; }
bool is_safepoint() const { return check_flag(IsSafepointFlag); } bool is_safepoint() const { return check_flag(IsSafepointFlag); }
BlockBegin* begin() const { return _begin; } BlockBegin* begin() const { return _begin; }
// manipulation // manipulation
void set_begin(BlockBegin* begin); void set_begin(BlockBegin* begin);
// generic
virtual void other_values_do(ValueVisitor* f);
// successors // successors
int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; } int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; }
BlockBegin* sux_at(int i) const { return _sux->at(i); } BlockBegin* sux_at(int i) const { return _sux->at(i); }
@ -1919,6 +1862,8 @@ BASE(Switch, BlockEnd)
Value tag() const { return _tag; } Value tag() const { return _tag; }
int length() const { return number_of_sux() - 1; } int length() const { return number_of_sux() - 1; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
}; };
@ -1996,7 +1941,6 @@ LEAF(Throw, BlockEnd)
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
virtual void state_values_do(ValueVisitor* f);
}; };
@ -2022,9 +1966,9 @@ LEAF(OsrEntry, Instruction)
public: public:
// creation // creation
#ifdef _LP64 #ifdef _LP64
OsrEntry() : Instruction(longType, false) { pin(); } OsrEntry() : Instruction(longType) { pin(); }
#else #else
OsrEntry() : Instruction(intType, false) { pin(); } OsrEntry() : Instruction(intType) { pin(); }
#endif #endif
// generic // generic
@ -2036,7 +1980,7 @@ LEAF(OsrEntry, Instruction)
LEAF(ExceptionObject, Instruction) LEAF(ExceptionObject, Instruction)
public: public:
// creation // creation
ExceptionObject() : Instruction(objectType, false) { ExceptionObject() : Instruction(objectType) {
pin(); pin();
} }
@ -2091,7 +2035,6 @@ BASE(UnsafeOp, Instruction)
// generic // generic
virtual void input_values_do(ValueVisitor* f) { } virtual void input_values_do(ValueVisitor* f) { }
virtual void other_values_do(ValueVisitor* f) { }
}; };
@ -2303,13 +2246,11 @@ LEAF(ProfileInvoke, Instruction)
private: private:
ciMethod* _inlinee; ciMethod* _inlinee;
ValueStack* _state; ValueStack* _state;
int _bci_of_invoke;
public: public:
ProfileInvoke(ciMethod* inlinee, ValueStack* state, int bci) ProfileInvoke(ciMethod* inlinee, ValueStack* state)
: Instruction(voidType) : Instruction(voidType)
, _inlinee(inlinee) , _inlinee(inlinee)
, _bci_of_invoke(bci)
, _state(state) , _state(state)
{ {
// The ProfileInvoke has side-effects and must occur precisely where located QQQ??? // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
@ -2318,7 +2259,6 @@ LEAF(ProfileInvoke, Instruction)
ciMethod* inlinee() { return _inlinee; } ciMethod* inlinee() { return _inlinee; }
ValueStack* state() { return _state; } ValueStack* state() { return _state; }
int bci_of_invoke() { return _bci_of_invoke; }
virtual void input_values_do(ValueVisitor*) {} virtual void input_values_do(ValueVisitor*) {}
virtual void state_values_do(ValueVisitor*); virtual void state_values_do(ValueVisitor*);
}; };
View File
@ -316,7 +316,7 @@ void InstructionPrinter::print_head() {
void InstructionPrinter::print_line(Instruction* instr) { void InstructionPrinter::print_line(Instruction* instr) {
// print instruction data on one line // print instruction data on one line
if (instr->is_pinned()) output()->put('.'); if (instr->is_pinned()) output()->put('.');
fill_to(bci_pos ); output()->print("%d", instr->bci()); fill_to(bci_pos ); output()->print("%d", instr->printable_bci());
fill_to(use_pos ); output()->print("%d", instr->use_count()); fill_to(use_pos ); output()->print("%d", instr->use_count());
fill_to(temp_pos ); print_temp(instr); fill_to(temp_pos ); print_temp(instr);
fill_to(instr_pos); print_instr(instr); fill_to(instr_pos); print_instr(instr);
@ -569,7 +569,7 @@ void InstructionPrinter::do_BlockBegin(BlockBegin* x) {
if (printed_flag) output()->print(") "); if (printed_flag) output()->print(") ");
// print block bci range // print block bci range
output()->print("[%d, %d]", x->bci(), (end == NULL ? -1 : end->bci())); output()->print("[%d, %d]", x->bci(), (end == NULL ? -1 : end->printable_bci()));
// print block successors // print block successors
if (end != NULL && end->number_of_sux() > 0) { if (end != NULL && end->number_of_sux() > 0) {
View File
@ -211,6 +211,7 @@ void LIR_OprDesc::validate_type() const {
case T_BYTE: case T_BYTE:
case T_SHORT: case T_SHORT:
case T_INT: case T_INT:
case T_ADDRESS:
case T_OBJECT: case T_OBJECT:
case T_ARRAY: case T_ARRAY:
assert((kind_field() == cpu_register || kind_field() == stack_value) && assert((kind_field() == cpu_register || kind_field() == stack_value) &&
@ -1519,7 +1520,7 @@ static void print_block(BlockBegin* x) {
if (x->is_set(BlockBegin::linear_scan_loop_end_flag)) tty->print("le "); if (x->is_set(BlockBegin::linear_scan_loop_end_flag)) tty->print("le ");
// print block bci range // print block bci range
tty->print("[%d, %d] ", x->bci(), (end == NULL ? -1 : end->bci())); tty->print("[%d, %d] ", x->bci(), (end == NULL ? -1 : end->printable_bci()));
// print predecessors and successors // print predecessors and successors
if (x->number_of_preds() > 0) { if (x->number_of_preds() > 0) {
@ -1575,7 +1576,7 @@ void LIR_Op::print_on(outputStream* out) const {
} }
out->print(name()); out->print(" "); out->print(name()); out->print(" ");
print_instr(out); print_instr(out);
if (info() != NULL) out->print(" [bci:%d]", info()->bci()); if (info() != NULL) out->print(" [bci:%d]", info()->stack()->bci());
#ifdef ASSERT #ifdef ASSERT
if (Verbose && _file != NULL) { if (Verbose && _file != NULL) {
out->print(" (%s:%d)", _file, _line); out->print(" (%s:%d)", _file, _line);
@ -1780,7 +1781,7 @@ void LIR_OpBranch::print_instr(outputStream* out) const {
out->print("["); out->print("[");
stub()->print_name(out); stub()->print_name(out);
out->print(": 0x%x]", stub()); out->print(": 0x%x]", stub());
if (stub()->info() != NULL) out->print(" [bci:%d]", stub()->info()->bci()); if (stub()->info() != NULL) out->print(" [bci:%d]", stub()->info()->stack()->bci());
} else { } else {
out->print("[label:0x%x] ", label()); out->print("[label:0x%x] ", label());
} }
@ -1895,7 +1896,7 @@ void LIR_OpTypeCheck::print_instr(outputStream* out) const {
tmp2()->print(out); out->print(" "); tmp2()->print(out); out->print(" ");
tmp3()->print(out); out->print(" "); tmp3()->print(out); out->print(" ");
result_opr()->print(out); out->print(" "); result_opr()->print(out); out->print(" ");
if (info_for_exception() != NULL) out->print(" [bci:%d]", info_for_exception()->bci()); if (info_for_exception() != NULL) out->print(" [bci:%d]", info_for_exception()->stack()->bci());
} }
View File
@ -280,7 +280,7 @@ class LIR_OprDesc: public CompilationResourceObj {
, int_type = 1 << type_shift , int_type = 1 << type_shift
, long_type = 2 << type_shift , long_type = 2 << type_shift
, object_type = 3 << type_shift , object_type = 3 << type_shift
, pointer_type = 4 << type_shift , address_type = 4 << type_shift
, float_type = 5 << type_shift , float_type = 5 << type_shift
, double_type = 6 << type_shift , double_type = 6 << type_shift
}; };
@ -303,6 +303,7 @@ class LIR_OprDesc: public CompilationResourceObj {
case T_BYTE: case T_BYTE:
case T_SHORT: case T_SHORT:
case T_INT: case T_INT:
case T_ADDRESS:
case T_OBJECT: case T_OBJECT:
case T_ARRAY: case T_ARRAY:
return single_size; return single_size;
@ -456,6 +457,7 @@ inline LIR_OprDesc::OprType as_OprType(BasicType type) {
case T_DOUBLE: return LIR_OprDesc::double_type; case T_DOUBLE: return LIR_OprDesc::double_type;
case T_OBJECT: case T_OBJECT:
case T_ARRAY: return LIR_OprDesc::object_type; case T_ARRAY: return LIR_OprDesc::object_type;
case T_ADDRESS: return LIR_OprDesc::address_type;
case T_ILLEGAL: // fall through case T_ILLEGAL: // fall through
default: ShouldNotReachHere(); return LIR_OprDesc::unknown_type; default: ShouldNotReachHere(); return LIR_OprDesc::unknown_type;
} }
@ -468,6 +470,7 @@ inline BasicType as_BasicType(LIR_OprDesc::OprType t) {
case LIR_OprDesc::float_type: return T_FLOAT; case LIR_OprDesc::float_type: return T_FLOAT;
case LIR_OprDesc::double_type: return T_DOUBLE; case LIR_OprDesc::double_type: return T_DOUBLE;
case LIR_OprDesc::object_type: return T_OBJECT; case LIR_OprDesc::object_type: return T_OBJECT;
case LIR_OprDesc::address_type: return T_ADDRESS;
case LIR_OprDesc::unknown_type: // fall through case LIR_OprDesc::unknown_type: // fall through
default: ShouldNotReachHere(); return T_ILLEGAL; default: ShouldNotReachHere(); return T_ILLEGAL;
} }
@ -550,8 +553,24 @@ class LIR_OprFact: public AllStatic {
static LIR_Opr illegalOpr; static LIR_Opr illegalOpr;
static LIR_Opr single_cpu(int reg) { return (LIR_Opr)(intptr_t)((reg << LIR_OprDesc::reg1_shift) | LIR_OprDesc::int_type | LIR_OprDesc::cpu_register | LIR_OprDesc::single_size); } static LIR_Opr single_cpu(int reg) {
static LIR_Opr single_cpu_oop(int reg) { return (LIR_Opr)(intptr_t)((reg << LIR_OprDesc::reg1_shift) | LIR_OprDesc::object_type | LIR_OprDesc::cpu_register | LIR_OprDesc::single_size); } return (LIR_Opr)(intptr_t)((reg << LIR_OprDesc::reg1_shift) |
LIR_OprDesc::int_type |
LIR_OprDesc::cpu_register |
LIR_OprDesc::single_size);
}
static LIR_Opr single_cpu_oop(int reg) {
return (LIR_Opr)(intptr_t)((reg << LIR_OprDesc::reg1_shift) |
LIR_OprDesc::object_type |
LIR_OprDesc::cpu_register |
LIR_OprDesc::single_size);
}
static LIR_Opr single_cpu_address(int reg) {
return (LIR_Opr)(intptr_t)((reg << LIR_OprDesc::reg1_shift) |
LIR_OprDesc::address_type |
LIR_OprDesc::cpu_register |
LIR_OprDesc::single_size);
}
static LIR_Opr double_cpu(int reg1, int reg2) { static LIR_Opr double_cpu(int reg1, int reg2) {
LP64_ONLY(assert(reg1 == reg2, "must be identical")); LP64_ONLY(assert(reg1 == reg2, "must be identical"));
return (LIR_Opr)(intptr_t)((reg1 << LIR_OprDesc::reg1_shift) | return (LIR_Opr)(intptr_t)((reg1 << LIR_OprDesc::reg1_shift) |
@ -633,6 +652,14 @@ class LIR_OprFact: public AllStatic {
LIR_OprDesc::virtual_mask); LIR_OprDesc::virtual_mask);
break; break;
case T_ADDRESS:
res = (LIR_Opr)(intptr_t)((index << LIR_OprDesc::data_shift) |
LIR_OprDesc::address_type |
LIR_OprDesc::cpu_register |
LIR_OprDesc::single_size |
LIR_OprDesc::virtual_mask);
break;
case T_LONG: case T_LONG:
res = (LIR_Opr)(intptr_t)((index << LIR_OprDesc::data_shift) | res = (LIR_Opr)(intptr_t)((index << LIR_OprDesc::data_shift) |
LIR_OprDesc::long_type | LIR_OprDesc::long_type |
@ -721,6 +748,13 @@ class LIR_OprFact: public AllStatic {
LIR_OprDesc::single_size); LIR_OprDesc::single_size);
break; break;
case T_ADDRESS:
res = (LIR_Opr)(intptr_t)((index << LIR_OprDesc::data_shift) |
LIR_OprDesc::address_type |
LIR_OprDesc::stack_value |
LIR_OprDesc::single_size);
break;
case T_LONG: case T_LONG:
res = (LIR_Opr)(intptr_t)((index << LIR_OprDesc::data_shift) | res = (LIR_Opr)(intptr_t)((index << LIR_OprDesc::data_shift) |
LIR_OprDesc::long_type | LIR_OprDesc::long_type |
View File
@ -35,7 +35,7 @@ void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_cod
append_patching_stub(patch); append_patching_stub(patch);
#ifdef ASSERT #ifdef ASSERT
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->bci()); Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());
if (patch->id() == PatchingStub::access_field_id) { if (patch->id() == PatchingStub::access_field_id) {
switch (code) { switch (code) {
case Bytecodes::_putstatic: case Bytecodes::_putstatic:
@ -221,7 +221,7 @@ void LIR_Assembler::emit_block(BlockBegin* block) {
#ifndef PRODUCT #ifndef PRODUCT
if (CommentedAssembly) { if (CommentedAssembly) {
stringStream st; stringStream st;
st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->bci()); st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->printable_bci());
_masm->block_comment(st.as_string()); _masm->block_comment(st.as_string());
} }
#endif #endif
@ -312,7 +312,7 @@ void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo) {
static ValueStack* debug_info(Instruction* ins) { static ValueStack* debug_info(Instruction* ins) {
StateSplit* ss = ins->as_StateSplit(); StateSplit* ss = ins->as_StateSplit();
if (ss != NULL) return ss->state(); if (ss != NULL) return ss->state();
return ins->lock_stack(); return ins->state_before();
} }
void LIR_Assembler::process_debug_info(LIR_Op* op) { void LIR_Assembler::process_debug_info(LIR_Op* op) {
@ -327,8 +327,7 @@ void LIR_Assembler::process_debug_info(LIR_Op* op) {
if (vstack == NULL) return; if (vstack == NULL) return;
if (_pending_non_safepoint != NULL) { if (_pending_non_safepoint != NULL) {
// Got some old debug info. Get rid of it. // Got some old debug info. Get rid of it.
if (_pending_non_safepoint->bci() == src->bci() && if (debug_info(_pending_non_safepoint) == vstack) {
debug_info(_pending_non_safepoint) == vstack) {
_pending_non_safepoint_offset = pc_offset; _pending_non_safepoint_offset = pc_offset;
return; return;
} }
@ -358,7 +357,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {
ValueStack* tc = t->caller_state(); ValueStack* tc = t->caller_state();
if (tc == NULL) return s; if (tc == NULL) return s;
t = tc; t = tc;
bci_result = s->scope()->caller_bci(); bci_result = tc->bci();
s = s->caller_state(); s = s->caller_state();
} }
} }
@ -366,7 +365,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {
void LIR_Assembler::record_non_safepoint_debug_info() { void LIR_Assembler::record_non_safepoint_debug_info() {
int pc_offset = _pending_non_safepoint_offset; int pc_offset = _pending_non_safepoint_offset;
ValueStack* vstack = debug_info(_pending_non_safepoint); ValueStack* vstack = debug_info(_pending_non_safepoint);
int bci = _pending_non_safepoint->bci(); int bci = vstack->bci();
DebugInformationRecorder* debug_info = compilation()->debug_info_recorder(); DebugInformationRecorder* debug_info = compilation()->debug_info_recorder();
assert(debug_info->recording_non_safepoints(), "sanity"); assert(debug_info->recording_non_safepoints(), "sanity");
@ -380,7 +379,7 @@ void LIR_Assembler::record_non_safepoint_debug_info() {
if (s == NULL) break; if (s == NULL) break;
IRScope* scope = s->scope(); IRScope* scope = s->scope();
//Always pass false for reexecute since these ScopeDescs are never used for deopt //Always pass false for reexecute since these ScopeDescs are never used for deopt
debug_info->describe_scope(pc_offset, scope->method(), s_bci, false/*reexecute*/); debug_info->describe_scope(pc_offset, scope->method(), s->bci(), false/*reexecute*/);
} }
debug_info->end_non_safepoint(pc_offset); debug_info->end_non_safepoint(pc_offset);
View File
@ -386,18 +386,26 @@ void LIRGenerator::walk(Value instr) {
CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ignore_xhandler) { CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ignore_xhandler) {
int index; assert(state != NULL, "state must be defined");
Value value;
for_each_stack_value(state, index, value) {
assert(value->subst() == value, "missed substition");
if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
walk(value);
assert(value->operand()->is_valid(), "must be evaluated now");
}
}
ValueStack* s = state; ValueStack* s = state;
int bci = x->bci();
for_each_state(s) { for_each_state(s) {
if (s->kind() == ValueStack::EmptyExceptionState) {
assert(s->stack_size() == 0 && s->locals_size() == 0 && (s->locks_size() == 0 || s->locks_size() == 1), "state must be empty");
continue;
}
int index;
Value value;
for_each_stack_value(s, index, value) {
assert(value->subst() == value, "missed substitution");
if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
walk(value);
assert(value->operand()->is_valid(), "must be evaluated now");
}
}
int bci = s->bci();
IRScope* scope = s->scope(); IRScope* scope = s->scope();
ciMethod* method = scope->method(); ciMethod* method = scope->method();
@ -428,15 +436,14 @@ CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ig
} }
} }
} }
bci = scope->caller_bci();
} }
return new CodeEmitInfo(x->bci(), state, ignore_xhandler ? NULL : x->exception_handlers()); return new CodeEmitInfo(state, ignore_xhandler ? NULL : x->exception_handlers());
} }
CodeEmitInfo* LIRGenerator::state_for(Instruction* x) { CodeEmitInfo* LIRGenerator::state_for(Instruction* x) {
return state_for(x, x->lock_stack()); return state_for(x, x->exception_state());
} }
@ -900,18 +907,14 @@ void LIRGenerator::move_to_phi(ValueStack* cur_state) {
Value sux_value; Value sux_value;
int index; int index;
assert(cur_state->scope() == sux_state->scope(), "not matching");
assert(cur_state->locals_size() == sux_state->locals_size(), "not matching");
assert(cur_state->stack_size() == sux_state->stack_size(), "not matching");
for_each_stack_value(sux_state, index, sux_value) { for_each_stack_value(sux_state, index, sux_value) {
move_to_phi(&resolver, cur_state->stack_at(index), sux_value); move_to_phi(&resolver, cur_state->stack_at(index), sux_value);
} }
// Inlining may cause the local state not to match up, so walk up
// the caller state until we get to the same scope as the
// successor and then start processing from there.
while (cur_state->scope() != sux_state->scope()) {
cur_state = cur_state->caller_state();
assert(cur_state != NULL, "scopes don't match up");
}
for_each_local_value(sux_state, index, sux_value) { for_each_local_value(sux_state, index, sux_value) {
move_to_phi(&resolver, cur_state->local_at(index), sux_value); move_to_phi(&resolver, cur_state->local_at(index), sux_value);
} }
@ -936,7 +939,6 @@ LIR_Opr LIRGenerator::new_register(BasicType type) {
} }
} }
_virtual_register_number += 1; _virtual_register_number += 1;
if (type == T_ADDRESS) type = T_INT;
return LIR_OprFact::virtual_register(vreg, type); return LIR_OprFact::virtual_register(vreg, type);
} }
@ -1024,10 +1026,10 @@ void LIRGenerator::do_Phi(Phi* x) {
// Code for a constant is generated lazily unless the constant is frequently used and can't be inlined. // Code for a constant is generated lazily unless the constant is frequently used and can't be inlined.
void LIRGenerator::do_Constant(Constant* x) { void LIRGenerator::do_Constant(Constant* x) {
if (x->state() != NULL) { if (x->state_before() != NULL) {
// Any constant with a ValueStack requires patching so emit the patch here // Any constant with a ValueStack requires patching so emit the patch here
LIR_Opr reg = rlock_result(x); LIR_Opr reg = rlock_result(x);
CodeEmitInfo* info = state_for(x, x->state()); CodeEmitInfo* info = state_for(x, x->state_before());
__ oop2reg_patch(NULL, reg, info); __ oop2reg_patch(NULL, reg, info);
} else if (x->use_count() > 1 && !can_inline_as_constant(x)) { } else if (x->use_count() > 1 && !can_inline_as_constant(x)) {
if (!x->is_pinned()) { if (!x->is_pinned()) {
@ -1103,7 +1105,7 @@ void LIRGenerator::do_getClass(Intrinsic* x) {
// need to perform the null check on the rcvr // need to perform the null check on the rcvr
CodeEmitInfo* info = NULL; CodeEmitInfo* info = NULL;
if (x->needs_null_check()) { if (x->needs_null_check()) {
info = state_for(x, x->state()->copy_locks()); info = state_for(x);
} }
__ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), result, info); __ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), result, info);
__ move(new LIR_Address(result, Klass::java_mirror_offset_in_bytes() + __ move(new LIR_Address(result, Klass::java_mirror_offset_in_bytes() +
@ -1348,6 +1350,7 @@ void LIRGenerator::G1SATBCardTableModRef_post_barrier(LIR_OprDesc* addr, LIR_Opr
addr = ptr; addr = ptr;
} }
assert(addr->is_register(), "must be a register at this point"); assert(addr->is_register(), "must be a register at this point");
assert(addr->type() == T_OBJECT, "addr should point to an object");
LIR_Opr xor_res = new_pointer_register(); LIR_Opr xor_res = new_pointer_register();
LIR_Opr xor_shift_res = new_pointer_register(); LIR_Opr xor_shift_res = new_pointer_register();
@ -1482,7 +1485,7 @@ void LIRGenerator::do_StoreField(StoreField* x) {
} else if (x->needs_null_check()) { } else if (x->needs_null_check()) {
NullCheck* nc = x->explicit_null_check(); NullCheck* nc = x->explicit_null_check();
if (nc == NULL) { if (nc == NULL) {
info = state_for(x, x->lock_stack()); info = state_for(x);
} else { } else {
info = state_for(nc); info = state_for(nc);
} }
@ -1510,10 +1513,12 @@ void LIRGenerator::do_StoreField(StoreField* x) {
set_no_result(x); set_no_result(x);
#ifndef PRODUCT
if (PrintNotLoaded && needs_patching) { if (PrintNotLoaded && needs_patching) {
tty->print_cr(" ###class not loaded at store_%s bci %d", tty->print_cr(" ###class not loaded at store_%s bci %d",
x->is_static() ? "static" : "field", x->bci()); x->is_static() ? "static" : "field", x->printable_bci());
} }
#endif
if (x->needs_null_check() && if (x->needs_null_check() &&
(needs_patching || (needs_patching ||
@ -1576,7 +1581,7 @@ void LIRGenerator::do_LoadField(LoadField* x) {
} else if (x->needs_null_check()) { } else if (x->needs_null_check()) {
NullCheck* nc = x->explicit_null_check(); NullCheck* nc = x->explicit_null_check();
if (nc == NULL) { if (nc == NULL) {
info = state_for(x, x->lock_stack()); info = state_for(x);
} else { } else {
info = state_for(nc); info = state_for(nc);
} }
@ -1586,10 +1591,12 @@ void LIRGenerator::do_LoadField(LoadField* x) {
object.load_item(); object.load_item();
#ifndef PRODUCT
if (PrintNotLoaded && needs_patching) { if (PrintNotLoaded && needs_patching) {
tty->print_cr(" ###class not loaded at load_%s bci %d", tty->print_cr(" ###class not loaded at load_%s bci %d",
x->is_static() ? "static" : "field", x->bci()); x->is_static() ? "static" : "field", x->printable_bci());
} }
#endif
if (x->needs_null_check() && if (x->needs_null_check() &&
(needs_patching || (needs_patching ||
@ -1782,7 +1789,7 @@ void LIRGenerator::do_Throw(Throw* x) {
if (GenerateCompilerNullChecks && if (GenerateCompilerNullChecks &&
(x->exception()->as_NewInstance() == NULL && x->exception()->as_ExceptionObject() == NULL)) { (x->exception()->as_NewInstance() == NULL && x->exception()->as_ExceptionObject() == NULL)) {
// if the exception object wasn't created using new then it might be null. // if the exception object wasn't created using new then it might be null.
__ null_check(exception_opr, new CodeEmitInfo(info, true)); __ null_check(exception_opr, new CodeEmitInfo(info, x->state()->copy(ValueStack::ExceptionState, x->state()->bci())));
} }
if (compilation()->env()->jvmti_can_post_on_exceptions()) { if (compilation()->env()->jvmti_can_post_on_exceptions()) {
@ -2128,7 +2135,6 @@ void LIRGenerator::do_TableSwitch(TableSwitch* x) {
int lo_key = x->lo_key(); int lo_key = x->lo_key();
int hi_key = x->hi_key(); int hi_key = x->hi_key();
int len = x->length(); int len = x->length();
CodeEmitInfo* info = state_for(x, x->state());
LIR_Opr value = tag.result(); LIR_Opr value = tag.result();
if (UseTableRanges) { if (UseTableRanges) {
do_SwitchRanges(create_lookup_ranges(x), value, x->default_sux()); do_SwitchRanges(create_lookup_ranges(x), value, x->default_sux());
@ -2187,7 +2193,7 @@ void LIRGenerator::do_Goto(Goto* x) {
// increment backedge counter if needed // increment backedge counter if needed
CodeEmitInfo* info = state_for(x, state); CodeEmitInfo* info = state_for(x, state);
increment_backedge_counter(info, info->bci()); increment_backedge_counter(info, info->stack()->bci());
CodeEmitInfo* safepoint_info = state_for(x, state); CodeEmitInfo* safepoint_info = state_for(x, state);
__ safepoint(safepoint_poll_register(), safepoint_info); __ safepoint(safepoint_poll_register(), safepoint_info);
} }
@ -2294,7 +2300,7 @@ void LIRGenerator::do_Base(Base* x) {
LIR_Opr lock = new_register(T_INT); LIR_Opr lock = new_register(T_INT);
__ load_stack_address_monitor(0, lock); __ load_stack_address_monitor(0, lock);
CodeEmitInfo* info = new CodeEmitInfo(SynchronizationEntryBCI, scope()->start()->state(), NULL); CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
CodeStub* slow_path = new MonitorEnterStub(obj, lock, info); CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
// receiver is guaranteed non-NULL so don't need CodeEmitInfo // receiver is guaranteed non-NULL so don't need CodeEmitInfo
@ -2304,7 +2310,7 @@ void LIRGenerator::do_Base(Base* x) {
// increment invocation counters if needed // increment invocation counters if needed
if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting. if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
CodeEmitInfo* info = new CodeEmitInfo(InvocationEntryBci, scope()->start()->state(), NULL); CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
increment_invocation_counter(info); increment_invocation_counter(info);
} }
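A usage note, not part of this changeset: CodeEmitInfo no longer carries its own bci; callers hand it a ValueStack snapshot and read the bci back from that stack (state and bci below are placeholder names, following the hunks above):

    // sketch: the bci travels with the ValueStack copy passed to CodeEmitInfo
    CodeEmitInfo* info = new CodeEmitInfo(state->copy(ValueStack::StateBefore, bci), NULL);
    int at_bci = info->stack()->bci();   // replaces the former info->bci()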
@ -2464,7 +2470,7 @@ void LIRGenerator::do_Invoke(Invoke* x) {
break; break;
case Bytecodes::_invokedynamic: { case Bytecodes::_invokedynamic: {
ciBytecodeStream bcs(x->scope()->method()); ciBytecodeStream bcs(x->scope()->method());
bcs.force_bci(x->bci()); bcs.force_bci(x->state()->bci());
assert(bcs.cur_bc() == Bytecodes::_invokedynamic, "wrong stream"); assert(bcs.cur_bc() == Bytecodes::_invokedynamic, "wrong stream");
ciCPCache* cpcache = bcs.get_cpcache(); ciCPCache* cpcache = bcs.get_cpcache();
@ -2829,4 +2835,3 @@ LIR_Opr LIRGenerator::call_runtime(BasicTypeArray* signature, LIRItemList* args,
} }
return result; return result;
} }

View File

@ -2018,6 +2018,12 @@ LIR_Opr LinearScan::calc_operand_for_interval(const Interval* interval) {
return LIR_OprFact::single_cpu_oop(assigned_reg); return LIR_OprFact::single_cpu_oop(assigned_reg);
} }
case T_ADDRESS: {
assert(assigned_reg >= pd_first_cpu_reg && assigned_reg <= pd_last_cpu_reg, "no cpu register");
assert(interval->assigned_regHi() == any_reg, "must not have hi register");
return LIR_OprFact::single_cpu_address(assigned_reg);
}
#ifdef __SOFTFP__ #ifdef __SOFTFP__
case T_FLOAT: // fall through case T_FLOAT: // fall through
#endif // __SOFTFP__ #endif // __SOFTFP__
@ -2268,8 +2274,8 @@ void assert_equal(IRScopeDebugInfo* d1, IRScopeDebugInfo* d2) {
} }
void check_stack_depth(CodeEmitInfo* info, int stack_end) { void check_stack_depth(CodeEmitInfo* info, int stack_end) {
if (info->bci() != SynchronizationEntryBCI && !info->scope()->method()->is_native()) { if (info->stack()->bci() != SynchronizationEntryBCI && !info->scope()->method()->is_native()) {
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->bci()); Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());
switch (code) { switch (code) {
case Bytecodes::_ifnull : // fall through case Bytecodes::_ifnull : // fall through
case Bytecodes::_ifnonnull : // fall through case Bytecodes::_ifnonnull : // fall through
@ -2373,7 +2379,7 @@ OopMap* LinearScan::compute_oop_map(IntervalWalker* iw, LIR_Op* op, CodeEmitInfo
// add oops from lock stack // add oops from lock stack
assert(info->stack() != NULL, "CodeEmitInfo must always have a stack"); assert(info->stack() != NULL, "CodeEmitInfo must always have a stack");
int locks_count = info->stack()->locks_size(); int locks_count = info->stack()->total_locks_size();
for (int i = 0; i < locks_count; i++) { for (int i = 0; i < locks_count; i++) {
map->set_oop(frame_map()->monitor_object_regname(i)); map->set_oop(frame_map()->monitor_object_regname(i));
} }
@ -2756,19 +2762,13 @@ int LinearScan::append_scope_value(int op_id, Value value, GrowableArray<ScopeVa
} }
IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state, int cur_bci, int stack_end, int locks_end) { IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state) {
IRScopeDebugInfo* caller_debug_info = NULL; IRScopeDebugInfo* caller_debug_info = NULL;
int stack_begin, locks_begin;
ValueStack* caller_state = cur_scope->caller_state(); ValueStack* caller_state = cur_state->caller_state();
if (caller_state != NULL) { if (caller_state != NULL) {
// process recursively to compute outermost scope first // process recursively to compute outermost scope first
stack_begin = caller_state->stack_size(); caller_debug_info = compute_debug_info_for_scope(op_id, cur_scope->caller(), caller_state, innermost_state);
locks_begin = caller_state->locks_size();
caller_debug_info = compute_debug_info_for_scope(op_id, cur_scope->caller(), caller_state, innermost_state, cur_scope->caller_bci(), stack_begin, locks_begin);
} else {
stack_begin = 0;
locks_begin = 0;
} }
// initialize these to null. // initialize these to null.
@ -2779,7 +2779,7 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c
GrowableArray<MonitorValue*>* monitors = NULL; GrowableArray<MonitorValue*>* monitors = NULL;
// describe local variable values // describe local variable values
int nof_locals = cur_scope->method()->max_locals(); int nof_locals = cur_state->locals_size();
if (nof_locals > 0) { if (nof_locals > 0) {
locals = new GrowableArray<ScopeValue*>(nof_locals); locals = new GrowableArray<ScopeValue*>(nof_locals);
@ -2794,45 +2794,41 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c
} }
assert(locals->length() == cur_scope->method()->max_locals(), "wrong number of locals"); assert(locals->length() == cur_scope->method()->max_locals(), "wrong number of locals");
assert(locals->length() == cur_state->locals_size(), "wrong number of locals"); assert(locals->length() == cur_state->locals_size(), "wrong number of locals");
} else if (cur_scope->method()->max_locals() > 0) {
assert(cur_state->kind() == ValueStack::EmptyExceptionState, "should be");
nof_locals = cur_scope->method()->max_locals();
locals = new GrowableArray<ScopeValue*>(nof_locals);
for(int i = 0; i < nof_locals; i++) {
locals->append(&_illegal_value);
}
} }
// describe expression stack // describe expression stack
// int nof_stack = cur_state->stack_size();
// When we inline methods containing exception handlers, the
// "lock_stacks" are changed to preserve expression stack values
// in caller scopes when exception handlers are present. This
// can cause callee stacks to be smaller than caller stacks.
if (stack_end > innermost_state->stack_size()) {
stack_end = innermost_state->stack_size();
}
int nof_stack = stack_end - stack_begin;
if (nof_stack > 0) { if (nof_stack > 0) {
expressions = new GrowableArray<ScopeValue*>(nof_stack); expressions = new GrowableArray<ScopeValue*>(nof_stack);
int pos = stack_begin; int pos = 0;
while (pos < stack_end) { while (pos < nof_stack) {
Value expression = innermost_state->stack_at_inc(pos); Value expression = cur_state->stack_at_inc(pos);
append_scope_value(op_id, expression, expressions); append_scope_value(op_id, expression, expressions);
assert(expressions->length() + stack_begin == pos, "must match"); assert(expressions->length() == pos, "must match");
} }
assert(expressions->length() == cur_state->stack_size(), "wrong number of stack entries");
} }
// describe monitors // describe monitors
assert(locks_begin <= locks_end, "error in scope iteration"); int nof_locks = cur_state->locks_size();
int nof_locks = locks_end - locks_begin;
if (nof_locks > 0) { if (nof_locks > 0) {
int lock_offset = cur_state->caller_state() != NULL ? cur_state->caller_state()->total_locks_size() : 0;
monitors = new GrowableArray<MonitorValue*>(nof_locks); monitors = new GrowableArray<MonitorValue*>(nof_locks);
for (int i = locks_begin; i < locks_end; i++) { for (int i = 0; i < nof_locks; i++) {
monitors->append(location_for_monitor_index(i)); monitors->append(location_for_monitor_index(lock_offset + i));
} }
} }
return new IRScopeDebugInfo(cur_scope, cur_bci, locals, expressions, monitors, caller_debug_info); return new IRScopeDebugInfo(cur_scope, cur_state->bci(), locals, expressions, monitors, caller_debug_info);
} }
@ -2844,17 +2840,14 @@ void LinearScan::compute_debug_info(CodeEmitInfo* info, int op_id) {
assert(innermost_scope != NULL && innermost_state != NULL, "why is it missing?"); assert(innermost_scope != NULL && innermost_state != NULL, "why is it missing?");
int stack_end = innermost_state->stack_size(); DEBUG_ONLY(check_stack_depth(info, innermost_state->stack_size()));
int locks_end = innermost_state->locks_size();
DEBUG_ONLY(check_stack_depth(info, stack_end));
if (info->_scope_debug_info == NULL) { if (info->_scope_debug_info == NULL) {
// compute debug information // compute debug information
info->_scope_debug_info = compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state, info->bci(), stack_end, locks_end); info->_scope_debug_info = compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state);
} else { } else {
// debug information already set. Check that it is correct from the current point of view // debug information already set. Check that it is correct from the current point of view
DEBUG_ONLY(assert_equal(info->_scope_debug_info, compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state, info->bci(), stack_end, locks_end))); DEBUG_ONLY(assert_equal(info->_scope_debug_info, compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state)));
} }
} }

View File

@ -346,7 +346,7 @@ class LinearScan : public CompilationResourceObj {
int append_scope_value_for_operand(LIR_Opr opr, GrowableArray<ScopeValue*>* scope_values); int append_scope_value_for_operand(LIR_Opr opr, GrowableArray<ScopeValue*>* scope_values);
int append_scope_value(int op_id, Value value, GrowableArray<ScopeValue*>* scope_values); int append_scope_value(int op_id, Value value, GrowableArray<ScopeValue*>* scope_values);
IRScopeDebugInfo* compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state, int cur_bci, int stack_end, int locks_end); IRScopeDebugInfo* compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state);
void compute_debug_info(CodeEmitInfo* info, int op_id); void compute_debug_info(CodeEmitInfo* info, int op_id);
void assign_reg_num(LIR_OpList* instructions, IntervalWalker* iw); void assign_reg_num(LIR_OpList* instructions, IntervalWalker* iw);

View File

@ -140,25 +140,27 @@ class CE_Eliminator: public BlockClosure {
// with an IfOp followed by a Goto // with an IfOp followed by a Goto
// cut if_ away and get node before // cut if_ away and get node before
Instruction* cur_end = if_->prev(block); Instruction* cur_end = if_->prev(block);
int bci = if_->bci();
// append constants of true- and false-block if necessary // append constants of true- and false-block if necessary
// clone constants because original block must not be destroyed // clone constants because original block must not be destroyed
assert((t_value != f_const && f_value != t_const) || t_const == f_const, "mismatch"); assert((t_value != f_const && f_value != t_const) || t_const == f_const, "mismatch");
if (t_value == t_const) { if (t_value == t_const) {
t_value = new Constant(t_const->type()); t_value = new Constant(t_const->type());
cur_end = cur_end->set_next(t_value, bci); NOT_PRODUCT(t_value->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(t_value);
} }
if (f_value == f_const) { if (f_value == f_const) {
f_value = new Constant(f_const->type()); f_value = new Constant(f_const->type());
cur_end = cur_end->set_next(f_value, bci); NOT_PRODUCT(f_value->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(f_value);
} }
// it is very unlikely that the condition can be statically decided // it is very unlikely that the condition can be statically decided
// (this was checked previously by the Canonicalizer), so always // (this was checked previously by the Canonicalizer), so always
// append IfOp // append IfOp
Value result = new IfOp(if_->x(), if_->cond(), if_->y(), t_value, f_value); Value result = new IfOp(if_->x(), if_->cond(), if_->y(), t_value, f_value);
cur_end = cur_end->set_next(result, bci); NOT_PRODUCT(result->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(result);
// append Goto to successor // append Goto to successor
ValueStack* state_before = if_->is_safepoint() ? if_->state_before() : NULL; ValueStack* state_before = if_->is_safepoint() ? if_->state_before() : NULL;
@ -167,16 +169,15 @@ class CE_Eliminator: public BlockClosure {
// prepare state for Goto // prepare state for Goto
ValueStack* goto_state = if_->state(); ValueStack* goto_state = if_->state();
while (sux_state->scope() != goto_state->scope()) { while (sux_state->scope() != goto_state->scope()) {
goto_state = goto_state->pop_scope(); goto_state = goto_state->caller_state();
assert(goto_state != NULL, "states do not match up"); assert(goto_state != NULL, "states do not match up");
} }
goto_state = goto_state->copy(); goto_state = goto_state->copy(ValueStack::StateAfter, goto_state->bci());
goto_state->push(result->type(), result); goto_state->push(result->type(), result);
assert(goto_state->is_same_across_scopes(sux_state), "states must match now"); assert(goto_state->is_same(sux_state), "states must match now");
goto_->set_state(goto_state); goto_->set_state(goto_state);
// Steal the bci for the goto from the sux cur_end = cur_end->set_next(goto_, goto_state->bci());
cur_end = cur_end->set_next(goto_, sux->bci());
// Adjust control flow graph // Adjust control flow graph
BlockBegin::disconnect_edge(block, t_block); BlockBegin::disconnect_edge(block, t_block);
@ -251,10 +252,8 @@ class BlockMerger: public BlockClosure {
// no phi functions must be present at beginning of sux // no phi functions must be present at beginning of sux
ValueStack* sux_state = sux->state(); ValueStack* sux_state = sux->state();
ValueStack* end_state = end->state(); ValueStack* end_state = end->state();
while (end_state->scope() != sux_state->scope()) {
// match up inlining level assert(end_state->scope() == sux_state->scope(), "scopes must match");
end_state = end_state->pop_scope();
}
assert(end_state->stack_size() == sux_state->stack_size(), "stack not equal"); assert(end_state->stack_size() == sux_state->stack_size(), "stack not equal");
assert(end_state->locals_size() == sux_state->locals_size(), "locals not equal"); assert(end_state->locals_size() == sux_state->locals_size(), "locals not equal");
@ -273,7 +272,7 @@ class BlockMerger: public BlockClosure {
Instruction* prev = end->prev(block); Instruction* prev = end->prev(block);
Instruction* next = sux->next(); Instruction* next = sux->next();
assert(prev->as_BlockEnd() == NULL, "must not be a BlockEnd"); assert(prev->as_BlockEnd() == NULL, "must not be a BlockEnd");
prev->set_next(next, next->bci()); prev->set_next(next);
sux->disconnect_from_graph(); sux->disconnect_from_graph();
block->set_end(sux->end()); block->set_end(sux->end());
// add exception handlers of deleted block, if any // add exception handlers of deleted block, if any
@ -337,7 +336,8 @@ class BlockMerger: public BlockClosure {
newif->set_state(if_->state()->copy()); newif->set_state(if_->state()->copy());
assert(prev->next() == if_, "must be guaranteed by above search"); assert(prev->next() == if_, "must be guaranteed by above search");
prev->set_next(newif, if_->bci()); NOT_PRODUCT(newif->set_printable_bci(if_->printable_bci()));
prev->set_next(newif);
block->set_end(newif); block->set_end(newif);
_merge_count++; _merge_count++;
@ -705,7 +705,7 @@ void NullCheckEliminator::iterate_one(BlockBegin* block) {
// visiting instructions which are references in other blocks or // visiting instructions which are references in other blocks or
// visiting instructions more than once. // visiting instructions more than once.
mark_visitable(instr); mark_visitable(instr);
if (instr->is_root() || instr->can_trap() || (instr->as_NullCheck() != NULL)) { if (instr->is_pinned() || instr->can_trap() || (instr->as_NullCheck() != NULL)) {
mark_visited(instr); mark_visited(instr);
instr->input_values_do(this); instr->input_values_do(this);
instr->visit(&_visitor); instr->visit(&_visitor);

View File

@ -28,55 +28,60 @@
// Implementation of ValueStack // Implementation of ValueStack
ValueStack::ValueStack(IRScope* scope, int locals_size, int max_stack_size) ValueStack::ValueStack(IRScope* scope, ValueStack* caller_state)
: _scope(scope) : _scope(scope)
, _locals(locals_size, NULL) , _caller_state(caller_state)
, _stack(max_stack_size) , _bci(-99)
, _lock_stack(false) , _kind(Parsing)
, _locks(1) , _locals(scope->method()->max_locals(), NULL)
, _stack(scope->method()->max_stack())
, _locks()
{ {
assert(scope != NULL, "scope must exist"); verify();
}
ValueStack* ValueStack::copy() {
ValueStack* s = new ValueStack(scope(), locals_size(), max_stack_size());
s->_stack.appendAll(&_stack);
s->_locks.appendAll(&_locks);
s->replace_locals(this);
return s;
} }
ValueStack* ValueStack::copy_locks() { ValueStack::ValueStack(ValueStack* copy_from, Kind kind, int bci)
int sz = scope()->lock_stack_size(); : _scope(copy_from->scope())
if (stack_size() == 0) { , _caller_state(copy_from->caller_state())
sz = 0; , _bci(bci)
, _kind(kind)
, _locals()
, _stack()
, _locks(copy_from->locks_size())
{
assert(kind != EmptyExceptionState || !Compilation::current()->env()->jvmti_can_access_local_variables(), "need locals");
if (kind != EmptyExceptionState) {
// only allocate space if we need to copy the locals-array
_locals = Values(copy_from->locals_size());
_locals.appendAll(&copy_from->_locals);
} }
ValueStack* s = new ValueStack(scope(), locals_size(), sz);
s->_lock_stack = true; if (kind != ExceptionState && kind != EmptyExceptionState) {
s->_locks.appendAll(&_locks); if (kind == Parsing) {
s->replace_locals(this); // stack will be modified, so reserve enough space to avoid resizing
if (sz > 0) { _stack = Values(scope()->method()->max_stack());
assert(sz <= stack_size(), "lock stack underflow"); } else {
for (int i = 0; i < sz; i++) { // stack will not be modified, so do not waste space
s->_stack.append(_stack[i]); _stack = Values(copy_from->stack_size());
} }
_stack.appendAll(&copy_from->_stack);
} }
return s;
_locks.appendAll(&copy_from->_locks);
verify();
} }
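For readers of the new constructors, a small usage sketch (illustrative only, not part of this changeset; scope, caller_state and bci are placeholder names): a state is now created per scope with an explicit caller link and later snapshotted with a kind and a bci:

    ValueStack* state  = new ValueStack(scope, caller_state);        // caller_state is NULL for the outermost scope
    ValueStack* before = state->copy(ValueStack::StateBefore, bci);  // immutable snapshot, e.g. for a CodeEmitInfo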
bool ValueStack::is_same(ValueStack* s) { bool ValueStack::is_same(ValueStack* s) {
assert(s != NULL, "state must exist"); if (scope() != s->scope()) return false;
assert(scope () == s->scope (), "scopes must correspond"); if (caller_state() != s->caller_state()) return false;
assert(locals_size() == s->locals_size(), "locals sizes must correspond");
return is_same_across_scopes(s);
}
if (locals_size() != s->locals_size()) return false;
if (stack_size() != s->stack_size()) return false;
if (locks_size() != s->locks_size()) return false;
bool ValueStack::is_same_across_scopes(ValueStack* s) {
assert(s != NULL, "state must exist");
assert(stack_size () == s->stack_size (), "stack sizes must correspond");
assert(locks_size () == s->locks_size (), "locks sizes must correspond");
// compare each stack element with the corresponding stack element of s // compare each stack element with the corresponding stack element of s
int index; int index;
Value value; Value value;
@ -89,12 +94,6 @@ bool ValueStack::is_same_across_scopes(ValueStack* s) {
return true; return true;
} }
ValueStack* ValueStack::caller_state() const {
return scope()->caller_state();
}
void ValueStack::clear_locals() { void ValueStack::clear_locals() {
for (int i = _locals.length() - 1; i >= 0; i--) { for (int i = _locals.length() - 1; i >= 0; i--) {
_locals.at_put(i, NULL); _locals.at_put(i, NULL);
@ -102,13 +101,6 @@ void ValueStack::clear_locals() {
} }
void ValueStack::replace_locals(ValueStack* with) {
assert(locals_size() == with->locals_size(), "number of locals must match");
for (int i = locals_size() - 1; i >= 0; i--) {
_locals.at_put(i, with->_locals.at(i));
}
}
void ValueStack::pin_stack_for_linear_scan() { void ValueStack::pin_stack_for_linear_scan() {
for_each_state_value(this, v, for_each_state_value(this, v,
if (v->as_Constant() == NULL && v->as_Local() == NULL) { if (v->as_Constant() == NULL && v->as_Local() == NULL) {
@ -123,33 +115,25 @@ void ValueStack::apply(Values list, ValueVisitor* f) {
for (int i = 0; i < list.length(); i++) { for (int i = 0; i < list.length(); i++) {
Value* va = list.adr_at(i); Value* va = list.adr_at(i);
Value v0 = *va; Value v0 = *va;
if (v0 != NULL) { if (v0 != NULL && !v0->type()->is_illegal()) {
if (!v0->type()->is_illegal()) { f->visit(va);
assert(v0->as_HiWord() == NULL, "should never see HiWord during traversal");
f->visit(va);
#ifdef ASSERT #ifdef ASSERT
Value v1 = *va; Value v1 = *va;
if (v0 != v1) { assert(v1->type()->is_illegal() || v0->type()->tag() == v1->type()->tag(), "types must match");
assert(v1->type()->is_illegal() || v0->type()->tag() == v1->type()->tag(), "types must match"); assert(!v1->type()->is_double_word() || list.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
if (v0->type()->is_double_word()) {
list.at_put(i + 1, v0->hi_word());
}
}
#endif #endif
if (v0->type()->is_double_word()) i++; if (v0->type()->is_double_word()) i++;
}
} }
} }
} }
void ValueStack::values_do(ValueVisitor* f) { void ValueStack::values_do(ValueVisitor* f) {
apply(_stack, f);
apply(_locks, f);
ValueStack* state = this; ValueStack* state = this;
for_each_state(state) { for_each_state(state) {
apply(state->_locals, f); apply(state->_locals, f);
apply(state->_stack, f);
apply(state->_locks, f);
} }
} }
@ -164,52 +148,26 @@ Values* ValueStack::pop_arguments(int argument_size) {
} }
int ValueStack::lock(IRScope* scope, Value obj) { int ValueStack::total_locks_size() const {
int num_locks = 0;
const ValueStack* state = this;
for_each_state(state) {
num_locks += state->locks_size();
}
return num_locks;
}
int ValueStack::lock(Value obj) {
_locks.push(obj); _locks.push(obj);
scope->set_min_number_of_locks(locks_size()); int num_locks = total_locks_size();
return locks_size() - 1; scope()->set_min_number_of_locks(num_locks);
return num_locks - 1;
} }
int ValueStack::unlock() { int ValueStack::unlock() {
_locks.pop(); _locks.pop();
return locks_size(); return total_locks_size();
}
ValueStack* ValueStack::push_scope(IRScope* scope) {
assert(scope->caller() == _scope, "scopes must have caller/callee relationship");
ValueStack* res = new ValueStack(scope,
scope->method()->max_locals(),
max_stack_size() + scope->method()->max_stack());
// Preserves stack and monitors.
res->_stack.appendAll(&_stack);
res->_locks.appendAll(&_locks);
assert(res->_stack.size() <= res->max_stack_size(), "stack overflow");
return res;
}
ValueStack* ValueStack::pop_scope() {
assert(_scope->caller() != NULL, "scope must have caller");
IRScope* scope = _scope->caller();
int max_stack = max_stack_size() - _scope->method()->max_stack();
assert(max_stack >= 0, "stack underflow");
ValueStack* res = new ValueStack(scope,
scope->method()->max_locals(),
max_stack);
// Preserves stack and monitors. Restores local and store state from caller scope.
res->_stack.appendAll(&_stack);
res->_locks.appendAll(&_locks);
ValueStack* caller = caller_state();
if (caller != NULL) {
for (int i = 0; i < caller->_locals.length(); i++) {
res->_locals.at_put(i, caller->_locals.at(i));
}
assert(res->_locals.length() == res->scope()->method()->max_locals(), "just checking");
}
assert(res->_stack.size() <= res->max_stack_size(), "stack overflow");
return res;
} }
@ -220,11 +178,7 @@ void ValueStack::setup_phi_for_stack(BlockBegin* b, int index) {
Value phi = new Phi(t, b, -index - 1); Value phi = new Phi(t, b, -index - 1);
_stack[index] = phi; _stack[index] = phi;
#ifdef ASSERT assert(!t->is_double_word() || _stack.at(index + 1) == NULL, "hi-word of doubleword value must be NULL");
if (t->is_double_word()) {
_stack[index + 1] = phi->hi_word();
}
#endif
} }
void ValueStack::setup_phi_for_local(BlockBegin* b, int index) { void ValueStack::setup_phi_for_local(BlockBegin* b, int index) {
@ -236,7 +190,9 @@ void ValueStack::setup_phi_for_local(BlockBegin* b, int index) {
} }
#ifndef PRODUCT #ifndef PRODUCT
void ValueStack::print() { void ValueStack::print() {
scope()->method()->print_name();
if (stack_is_empty()) { if (stack_is_empty()) {
tty->print_cr("empty stack"); tty->print_cr("empty stack");
} else { } else {
@ -244,18 +200,20 @@ void ValueStack::print() {
for (int i = 0; i < stack_size();) { for (int i = 0; i < stack_size();) {
Value t = stack_at_inc(i); Value t = stack_at_inc(i);
tty->print("%2d ", i); tty->print("%2d ", i);
tty->print("%c%d ", t->type()->tchar(), t->id());
ip.print_instr(t); ip.print_instr(t);
tty->cr(); tty->cr();
} }
} }
if (!no_active_locks()) { if (!no_active_locks()) {
InstructionPrinter ip; InstructionPrinter ip;
for (int i = 0; i < locks_size(); i--) { for (int i = 0; i < locks_size(); i++) {
Value t = lock_at(i); Value t = lock_at(i);
tty->print("lock %2d ", i); tty->print("lock %2d ", i);
if (t == NULL) { if (t == NULL) {
tty->print("this"); tty->print("this");
} else { } else {
tty->print("%c%d ", t->type()->tchar(), t->id());
ip.print_instr(t); ip.print_instr(t);
} }
tty->cr(); tty->cr();
@ -270,16 +228,55 @@ void ValueStack::print() {
tty->print("null"); tty->print("null");
i ++; i ++;
} else { } else {
tty->print("%c%d ", l->type()->tchar(), l->id());
ip.print_instr(l); ip.print_instr(l);
if (l->type()->is_illegal() || l->type()->is_single_word()) i ++; else i += 2; if (l->type()->is_illegal() || l->type()->is_single_word()) i ++; else i += 2;
} }
tty->cr(); tty->cr();
} }
} }
if (caller_state() != NULL) {
caller_state()->print();
}
} }
void ValueStack::verify() { void ValueStack::verify() {
Unimplemented(); assert(scope() != NULL, "scope must exist");
if (caller_state() != NULL) {
assert(caller_state()->scope() == scope()->caller(), "invalid caller scope");
caller_state()->verify();
}
if (kind() == Parsing) {
assert(bci() == -99, "bci not defined during parsing");
} else {
assert(bci() >= -1, "bci out of range");
assert(bci() < scope()->method()->code_size(), "bci out of range");
assert(bci() == SynchronizationEntryBCI || Bytecodes::is_defined(scope()->method()->java_code_at_bci(bci())), "make sure bci points at a real bytecode");
assert(scope()->method()->liveness_at_bci(bci()).is_valid(), "liveness at bci must be valid");
}
int i;
for (i = 0; i < stack_size(); i++) {
Value v = _stack.at(i);
if (v == NULL) {
assert(_stack.at(i - 1)->type()->is_double_word(), "only hi-words are NULL on stack");
} else if (v->type()->is_double_word()) {
assert(_stack.at(i + 1) == NULL, "hi-word must be NULL");
}
}
for (i = 0; i < locals_size(); i++) {
Value v = _locals.at(i);
if (v != NULL && v->type()->is_double_word()) {
assert(_locals.at(i + 1) == NULL, "hi-word must be NULL");
}
}
for_each_state_value(this, v,
assert(v != NULL, "just test if state-iteration succeeds");
);
} }
#endif // PRODUCT #endif // PRODUCT

View File

@ -23,9 +23,23 @@
*/ */
class ValueStack: public CompilationResourceObj { class ValueStack: public CompilationResourceObj {
public:
enum Kind {
Parsing, // During abstract interpretation in GraphBuilder
CallerState, // Caller state when inlining
StateBefore, // Before execution of instruction
StateAfter, // After execution of instruction
ExceptionState, // Exception handling of instruction
EmptyExceptionState, // Exception handling of instructions not covered by an xhandler
BlockBeginState // State of BlockBegin instruction with phi functions of this block
};
private: private:
IRScope* _scope; // the enclosing scope IRScope* _scope; // the enclosing scope
bool _lock_stack; // indicates that this ValueStack is for an exception site ValueStack* _caller_state;
int _bci;
Kind _kind;
Values _locals; // the locals Values _locals; // the locals
Values _stack; // the expression stack Values _stack; // the expression stack
Values _locks; // the monitor stack (holding the locked values) Values _locks; // the monitor stack (holding the locked values)
@ -36,100 +50,84 @@ class ValueStack: public CompilationResourceObj {
} }
Value check(ValueTag tag, Value t, Value h) { Value check(ValueTag tag, Value t, Value h) {
assert(h->as_HiWord()->lo_word() == t, "incorrect stack pair"); assert(h == NULL, "hi-word of doubleword value must be NULL");
return check(tag, t); return check(tag, t);
} }
// helper routine // helper routine
static void apply(Values list, ValueVisitor* f); static void apply(Values list, ValueVisitor* f);
// for simplified copying
ValueStack(ValueStack* copy_from, Kind kind, int bci);
public: public:
// creation // creation
ValueStack(IRScope* scope, int locals_size, int max_stack_size); ValueStack(IRScope* scope, ValueStack* caller_state);
ValueStack* copy() { return new ValueStack(this, _kind, _bci); }
ValueStack* copy(Kind new_kind, int new_bci) { return new ValueStack(this, new_kind, new_bci); }
ValueStack* copy_for_parsing() { return new ValueStack(this, Parsing, -99); }
void set_caller_state(ValueStack* s) {
assert(kind() == EmptyExceptionState ||
(Compilation::current()->env()->jvmti_can_access_local_variables() && kind() == ExceptionState),
"only EmptyExceptionStates can be modified");
_caller_state = s;
}
// merging
ValueStack* copy(); // returns a copy of this w/ cleared locals
ValueStack* copy_locks(); // returns a copy of this w/ cleared locals and stack
// Note that when inlining of methods with exception
// handlers is enabled, this stack may have a
// non-empty expression stack (size defined by
// scope()->lock_stack_size())
bool is_same(ValueStack* s); // returns true if this & s's types match (w/o checking locals) bool is_same(ValueStack* s); // returns true if this & s's types match (w/o checking locals)
bool is_same_across_scopes(ValueStack* s); // same as is_same but returns true even if stacks are in different scopes (used for block merging w/inlining)
// accessors // accessors
IRScope* scope() const { return _scope; } IRScope* scope() const { return _scope; }
bool is_lock_stack() const { return _lock_stack; } ValueStack* caller_state() const { return _caller_state; }
int bci() const { return _bci; }
Kind kind() const { return _kind; }
int locals_size() const { return _locals.length(); } int locals_size() const { return _locals.length(); }
int stack_size() const { return _stack.length(); } int stack_size() const { return _stack.length(); }
int locks_size() const { return _locks.length(); } int locks_size() const { return _locks.length(); }
int max_stack_size() const { return _stack.capacity(); }
bool stack_is_empty() const { return _stack.is_empty(); } bool stack_is_empty() const { return _stack.is_empty(); }
bool no_active_locks() const { return _locks.is_empty(); } bool no_active_locks() const { return _locks.is_empty(); }
ValueStack* caller_state() const; int total_locks_size() const;
// locals access // locals access
void clear_locals(); // sets all locals to NULL; void clear_locals(); // sets all locals to NULL;
// Kill local i. Also kill local i+1 if i was a long or double.
void invalidate_local(int i) { void invalidate_local(int i) {
Value x = _locals.at(i); assert(_locals.at(i)->type()->is_single_word() ||
if (x != NULL && x->type()->is_double_word()) { _locals.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
assert(_locals.at(i + 1)->as_HiWord()->lo_word() == x, "locals inconsistent");
_locals.at_put(i + 1, NULL);
}
_locals.at_put(i, NULL); _locals.at_put(i, NULL);
} }
Value local_at(int i) const {
Value load_local(int i) const {
Value x = _locals.at(i); Value x = _locals.at(i);
if (x != NULL && x->type()->is_illegal()) return NULL; assert(x == NULL || x->type()->is_single_word() ||
assert(x == NULL || x->as_HiWord() == NULL, "index points to hi word"); _locals.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
assert(x == NULL || x->type()->is_illegal() || x->type()->is_single_word() || x == _locals.at(i+1)->as_HiWord()->lo_word(), "locals inconsistent");
return x; return x;
} }
Value local_at(int i) const { return _locals.at(i); }
// Store x into local i.
void store_local(int i, Value x) { void store_local(int i, Value x) {
// Kill the old value // When overwriting local i, check if i - 1 was the start of a
invalidate_local(i); // double word local and kill it.
_locals.at_put(i, x);
// Writing a double word can kill other locals
if (x != NULL && x->type()->is_double_word()) {
// If x + i was the start of a double word local then kill i + 2.
Value x2 = _locals.at(i + 1);
if (x2 != NULL && x2->type()->is_double_word()) {
_locals.at_put(i + 2, NULL);
}
// If x is a double word local, also update i + 1.
#ifdef ASSERT
_locals.at_put(i + 1, x->hi_word());
#else
_locals.at_put(i + 1, NULL);
#endif
}
// If x - 1 was the start of a double word local then kill i - 1.
if (i > 0) { if (i > 0) {
Value prev = _locals.at(i - 1); Value prev = _locals.at(i - 1);
if (prev != NULL && prev->type()->is_double_word()) { if (prev != NULL && prev->type()->is_double_word()) {
_locals.at_put(i - 1, NULL); _locals.at_put(i - 1, NULL);
} }
} }
}
void replace_locals(ValueStack* with); _locals.at_put(i, x);
if (x->type()->is_double_word()) {
// hi-word of doubleword value is always NULL
_locals.at_put(i + 1, NULL);
}
}
// stack access // stack access
Value stack_at(int i) const { Value stack_at(int i) const {
Value x = _stack.at(i); Value x = _stack.at(i);
assert(x->as_HiWord() == NULL, "index points to hi word");
assert(x->type()->is_single_word() || assert(x->type()->is_single_word() ||
x->subst() == _stack.at(i+1)->as_HiWord()->lo_word(), "stack inconsistent"); _stack.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
return x; return x;
} }
@ -146,7 +144,6 @@ class ValueStack: public CompilationResourceObj {
void values_do(ValueVisitor* f); void values_do(ValueVisitor* f);
// untyped manipulation (for dup_x1, etc.) // untyped manipulation (for dup_x1, etc.)
void clear_stack() { _stack.clear(); }
void truncate_stack(int size) { _stack.trunc_to(size); } void truncate_stack(int size) { _stack.trunc_to(size); }
void raw_push(Value t) { _stack.push(t); } void raw_push(Value t) { _stack.push(t); }
Value raw_pop() { return _stack.pop(); } Value raw_pop() { return _stack.pop(); }
@ -156,15 +153,8 @@ class ValueStack: public CompilationResourceObj {
void fpush(Value t) { _stack.push(check(floatTag , t)); } void fpush(Value t) { _stack.push(check(floatTag , t)); }
void apush(Value t) { _stack.push(check(objectTag , t)); } void apush(Value t) { _stack.push(check(objectTag , t)); }
void rpush(Value t) { _stack.push(check(addressTag, t)); } void rpush(Value t) { _stack.push(check(addressTag, t)); }
#ifdef ASSERT
// in debug mode, use HiWord for 2-word values
void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(new HiWord(t)); }
void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(new HiWord(t)); }
#else
// in optimized mode, use NULL for 2-word values
void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(NULL); } void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(NULL); }
void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(NULL); } void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(NULL); }
#endif // ASSERT
void push(ValueType* type, Value t) { void push(ValueType* type, Value t) {
switch (type->tag()) { switch (type->tag()) {
@ -182,15 +172,8 @@ class ValueStack: public CompilationResourceObj {
Value fpop() { return check(floatTag , _stack.pop()); } Value fpop() { return check(floatTag , _stack.pop()); }
Value apop() { return check(objectTag , _stack.pop()); } Value apop() { return check(objectTag , _stack.pop()); }
Value rpop() { return check(addressTag, _stack.pop()); } Value rpop() { return check(addressTag, _stack.pop()); }
#ifdef ASSERT
// in debug mode, check for HiWord consistency
Value lpop() { Value h = _stack.pop(); return check(longTag , _stack.pop(), h); } Value lpop() { Value h = _stack.pop(); return check(longTag , _stack.pop(), h); }
Value dpop() { Value h = _stack.pop(); return check(doubleTag, _stack.pop(), h); } Value dpop() { Value h = _stack.pop(); return check(doubleTag, _stack.pop(), h); }
#else
// in optimized mode, ignore HiWord since it is NULL
Value lpop() { _stack.pop(); return check(longTag , _stack.pop()); }
Value dpop() { _stack.pop(); return check(doubleTag, _stack.pop()); }
#endif // ASSERT
Value pop(ValueType* type) { Value pop(ValueType* type) {
switch (type->tag()) { switch (type->tag()) {
@ -208,16 +191,10 @@ class ValueStack: public CompilationResourceObj {
Values* pop_arguments(int argument_size); Values* pop_arguments(int argument_size);
// locks access // locks access
int lock (IRScope* scope, Value obj); int lock (Value obj);
int unlock(); int unlock();
Value lock_at(int i) const { return _locks.at(i); } Value lock_at(int i) const { return _locks.at(i); }
// Inlining support
ValueStack* push_scope(IRScope* scope); // "Push" new scope, returning new resulting stack
// Preserves stack and locks, destroys locals
ValueStack* pop_scope(); // "Pop" topmost scope, returning new resulting stack
// Preserves stack and locks, destroys locals
// SSA form IR support // SSA form IR support
void setup_phi_for_stack(BlockBegin* b, int index); void setup_phi_for_stack(BlockBegin* b, int index);
void setup_phi_for_local(BlockBegin* b, int index); void setup_phi_for_local(BlockBegin* b, int index);
@ -298,16 +275,18 @@ class ValueStack: public CompilationResourceObj {
{ \ { \
int cur_index; \ int cur_index; \
ValueStack* cur_state = v_state; \ ValueStack* cur_state = v_state; \
Value v_value; \ Value v_value; \
{ \
for_each_stack_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \
for_each_state(cur_state) { \ for_each_state(cur_state) { \
for_each_local_value(cur_state, cur_index, v_value) { \ { \
v_code; \ for_each_local_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \ } \
{ \
for_each_stack_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \
} \ } \
} }
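A short usage sketch of the reordered iterator (illustrative only, not part of this changeset): for each state from the innermost scope outwards it now visits that scope's locals first and then its stack values, e.g.

    // sketch: touch every non-NULL value reachable from a state, across all inlined scopes
    for_each_state_value(state, v,
      assert(v != NULL, "iterator should only hand out non-NULL values");
    );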

View File

@ -216,9 +216,6 @@
develop(bool, DeoptC1, true, \ develop(bool, DeoptC1, true, \
"Use deoptimization in C1") \ "Use deoptimization in C1") \
\ \
develop(bool, DeoptOnAsyncException, true, \
"Deoptimize upon Thread.stop(); improves precision of IR") \
\
develop(bool, PrintBailouts, false, \ develop(bool, PrintBailouts, false, \
"Print bailout and its reason") \ "Print bailout and its reason") \
\ \

View File

@ -471,7 +471,7 @@ int ciInstanceKlass::compute_nonstatic_fields() {
ciField* field = fields->at(i); ciField* field = fields->at(i);
int offset = field->offset_in_bytes(); int offset = field->offset_in_bytes();
int size = (field->_type == NULL) ? heapOopSize : field->size_in_bytes(); int size = (field->_type == NULL) ? heapOopSize : field->size_in_bytes();
assert(last_offset <= offset, "no field overlap"); assert(last_offset <= offset, err_msg("no field overlap: %d <= %d", last_offset, offset));
if (last_offset > (int)sizeof(oopDesc)) if (last_offset > (int)sizeof(oopDesc))
assert((offset - last_offset) < BytesPerLong, "no big holes"); assert((offset - last_offset) < BytesPerLong, "no big holes");
// Note: Two consecutive T_BYTE fields will be separated by wordSize-1 // Note: Two consecutive T_BYTE fields will be separated by wordSize-1

View File

@ -735,7 +735,11 @@ int ciMethod::scale_count(int count, float prof_factor) {
// Return true if the method is an instance of one of the two // Return true if the method is an instance of one of the two
// signature-polymorphic MethodHandle methods, invokeExact or invokeGeneric. // signature-polymorphic MethodHandle methods, invokeExact or invokeGeneric.
bool ciMethod::is_method_handle_invoke() const { bool ciMethod::is_method_handle_invoke() const {
if (!is_loaded()) return false; if (!is_loaded()) {
bool flag = (holder()->name() == ciSymbol::java_dyn_MethodHandle() &&
methodOopDesc::is_method_handle_invoke_name(name()->sid()));
return flag;
}
VM_ENTRY_MARK; VM_ENTRY_MARK;
return get_methodOop()->is_method_handle_invoke(); return get_methodOop()->is_method_handle_invoke();
} }
@ -975,7 +979,7 @@ int ciMethod::instructions_size(int comp_level) {
GUARDED_VM_ENTRY( GUARDED_VM_ENTRY(
nmethod* code = get_methodOop()->code(); nmethod* code = get_methodOop()->code();
if (code != NULL && (comp_level == CompLevel_any || comp_level == code->comp_level())) { if (code != NULL && (comp_level == CompLevel_any || comp_level == code->comp_level())) {
return code->code_end() - code->verified_entry_point(); return code->insts_end() - code->verified_entry_point();
} }
return 0; return 0;
) )

View File

@ -1945,7 +1945,7 @@ ciTypeFlow::ciTypeFlow(ciEnv* env, ciMethod* method, int osr_bci) {
_has_irreducible_entry = false; _has_irreducible_entry = false;
_osr_bci = osr_bci; _osr_bci = osr_bci;
_failure_reason = NULL; _failure_reason = NULL;
assert(start_bci() >= 0 && start_bci() < code_size() , "correct osr_bci argument"); assert(0 <= start_bci() && start_bci() < code_size() , err_msg("correct osr_bci argument: 0 <= %d < %d", start_bci(), code_size()));
_work_list = NULL; _work_list = NULL;
_ciblock_count = _methodBlocks->num_blocks(); _ciblock_count = _methodBlocks->num_blocks();

View File

@ -2702,13 +2702,15 @@ void ClassFileParser::java_dyn_MethodHandle_fix_pre(constantPoolHandle cp,
// Adjust the field type from byte to an unmanaged pointer. // Adjust the field type from byte to an unmanaged pointer.
assert(fac_ptr->nonstatic_byte_count > 0, ""); assert(fac_ptr->nonstatic_byte_count > 0, "");
fac_ptr->nonstatic_byte_count -= 1; fac_ptr->nonstatic_byte_count -= 1;
(*fields_ptr)->ushort_at_put(i + instanceKlass::signature_index_offset,
word_sig_index); (*fields_ptr)->ushort_at_put(i + instanceKlass::signature_index_offset, word_sig_index);
fac_ptr->nonstatic_word_count += 1; assert(wordSize == longSize || wordSize == jintSize, "ILP32 or LP64");
if (wordSize == longSize) fac_ptr->nonstatic_double_count += 1;
else fac_ptr->nonstatic_word_count += 1;
FieldAllocationType atype = (FieldAllocationType) (*fields_ptr)->ushort_at(i + instanceKlass::low_offset); FieldAllocationType atype = (FieldAllocationType) (*fields_ptr)->ushort_at(i + instanceKlass::low_offset);
assert(atype == NONSTATIC_BYTE, ""); assert(atype == NONSTATIC_BYTE, "");
FieldAllocationType new_atype = NONSTATIC_WORD; FieldAllocationType new_atype = (wordSize == longSize) ? NONSTATIC_DOUBLE : NONSTATIC_WORD;
(*fields_ptr)->ushort_at_put(i + instanceKlass::low_offset, new_atype); (*fields_ptr)->ushort_at_put(i + instanceKlass::low_offset, new_atype);
found_vmentry = true; found_vmentry = true;

View File

@ -1421,7 +1421,7 @@ void nmethod::flush() {
} }
#ifdef SHARK #ifdef SHARK
((SharkCompiler *) compiler())->free_compiled_method(instructions_begin()); ((SharkCompiler *) compiler())->free_compiled_method(insts_begin());
#endif // SHARK #endif // SHARK
((CodeBlob*)(this))->flush(); ((CodeBlob*)(this))->flush();

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -39,7 +39,7 @@ void ConcurrentMarkSweepPolicy::initialize_generations() {
if (_generations == NULL) if (_generations == NULL)
vm_exit_during_initialization("Unable to allocate gen spec"); vm_exit_during_initialization("Unable to allocate gen spec");
if (UseParNewGC && ParallelGCThreads > 0) { if (ParNewGeneration::in_use()) {
if (UseAdaptiveSizePolicy) { if (UseAdaptiveSizePolicy) {
_generations[0] = new GenerationSpec(Generation::ASParNew, _generations[0] = new GenerationSpec(Generation::ASParNew,
_initial_gen0_size, _max_gen0_size); _initial_gen0_size, _max_gen0_size);
@ -79,7 +79,7 @@ void ConcurrentMarkSweepPolicy::initialize_size_policy(size_t init_eden_size,
void ConcurrentMarkSweepPolicy::initialize_gc_policy_counters() { void ConcurrentMarkSweepPolicy::initialize_gc_policy_counters() {
// initialize the policy counters - 2 collectors, 3 generations // initialize the policy counters - 2 collectors, 3 generations
if (UseParNewGC && ParallelGCThreads > 0) { if (ParNewGeneration::in_use()) {
_gc_policy_counters = new GCPolicyCounters("ParNew:CMS", 2, 3); _gc_policy_counters = new GCPolicyCounters("ParNew:CMS", 2, 3);
} }
else { else {
@ -102,7 +102,7 @@ void ASConcurrentMarkSweepPolicy::initialize_gc_policy_counters() {
assert(size_policy() != NULL, "A size policy is required"); assert(size_policy() != NULL, "A size policy is required");
// initialize the policy counters - 2 collectors, 3 generations // initialize the policy counters - 2 collectors, 3 generations
if (UseParNewGC && ParallelGCThreads > 0) { if (ParNewGeneration::in_use()) {
_gc_policy_counters = new CMSGCAdaptivePolicyCounters("ParNew:CMS", 2, 3, _gc_policy_counters = new CMSGCAdaptivePolicyCounters("ParNew:CMS", 2, 3,
size_policy()); size_policy());
} }

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -50,6 +50,18 @@ HeapWord* CMSPermGen::mem_allocate(size_t size) {
} }
} }
HeapWord* CMSPermGen::request_expand_and_allocate(Generation* gen,
size_t size,
GCCause::Cause prev_cause /* ignored */) {
HeapWord* obj = gen->expand_and_allocate(size, false);
if (gen->capacity() >= _capacity_expansion_limit) {
set_capacity_expansion_limit(gen->capacity() + MaxPermHeapExpansion);
assert(((ConcurrentMarkSweepGeneration*)gen)->should_concurrent_collect(),
"Should kick off a collection if one not in progress");
}
return obj;
}
void CMSPermGen::compute_new_size() { void CMSPermGen::compute_new_size() {
_gen->compute_new_size(); _gen->compute_new_size();
} }

View File

@ -33,6 +33,10 @@ class CMSPermGen: public PermGen {
// The "generation" view. // The "generation" view.
ConcurrentMarkSweepGeneration* _gen; ConcurrentMarkSweepGeneration* _gen;
// Override default implementation from PermGen
virtual HeapWord* request_expand_and_allocate(Generation* gen, size_t size,
GCCause::Cause prev_cause);
public: public:
CMSPermGen(ReservedSpace rs, size_t initial_byte_size, CMSPermGen(ReservedSpace rs, size_t initial_byte_size,
CardTableRS* ct, FreeBlockDictionary::DictionaryChoice); CardTableRS* ct, FreeBlockDictionary::DictionaryChoice);

View File

@ -124,7 +124,8 @@ CompactibleFreeListSpace::CompactibleFreeListSpace(BlockOffsetSharedArray* bs,
checkFreeListConsistency(); checkFreeListConsistency();
// Initialize locks for parallel case. // Initialize locks for parallel case.
if (ParallelGCThreads > 0) {
if (CollectedHeap::use_parallel_gc_threads()) {
for (size_t i = IndexSetStart; i < IndexSetSize; i += IndexSetStride) { for (size_t i = IndexSetStart; i < IndexSetSize; i += IndexSetStride) {
_indexedFreeListParLocks[i] = new Mutex(Mutex::leaf - 1, // == ExpandHeap_lock - 1 _indexedFreeListParLocks[i] = new Mutex(Mutex::leaf - 1, // == ExpandHeap_lock - 1
"a freelist par lock", "a freelist par lock",
@ -1071,7 +1072,8 @@ bool CompactibleFreeListSpace::block_is_obj(const HeapWord* p) const {
// at address below "p" in finding the object that contains "p" // at address below "p" in finding the object that contains "p"
// and those objects (if garbage) may have been modified to hold // and those objects (if garbage) may have been modified to hold
// live range information. // live range information.
// assert(ParallelGCThreads > 0 || _bt.block_start(p) == p, "Should be a block boundary"); // assert(CollectedHeap::use_parallel_gc_threads() || _bt.block_start(p) == p,
// "Should be a block boundary");
if (FreeChunk::indicatesFreeChunk(p)) return false; if (FreeChunk::indicatesFreeChunk(p)) return false;
klassOop k = oop(p)->klass_or_null(); klassOop k = oop(p)->klass_or_null();
if (k != NULL) { if (k != NULL) {
@ -2932,7 +2934,9 @@ initialize_sequential_subtasks_for_rescan(int n_threads) {
"n_tasks calculation incorrect"); "n_tasks calculation incorrect");
SequentialSubTasksDone* pst = conc_par_seq_tasks(); SequentialSubTasksDone* pst = conc_par_seq_tasks();
assert(!pst->valid(), "Clobbering existing data?"); assert(!pst->valid(), "Clobbering existing data?");
pst->set_par_threads(n_threads); // Sets the condition for completion of the subtask (how many threads
// need to finish in order to be done).
pst->set_n_threads(n_threads);
pst->set_n_tasks((int)n_tasks); pst->set_n_tasks((int)n_tasks);
} }
@ -2972,6 +2976,8 @@ initialize_sequential_subtasks_for_marking(int n_threads,
"n_tasks calculation incorrect"); "n_tasks calculation incorrect");
SequentialSubTasksDone* pst = conc_par_seq_tasks(); SequentialSubTasksDone* pst = conc_par_seq_tasks();
assert(!pst->valid(), "Clobbering existing data?"); assert(!pst->valid(), "Clobbering existing data?");
pst->set_par_threads(n_threads); // Sets the condition for completion of the subtask (how many threads
// need to finish in order to be done).
pst->set_n_threads(n_threads);
pst->set_n_tasks((int)n_tasks); pst->set_n_tasks((int)n_tasks);
} }

View File

@ -195,7 +195,7 @@ ConcurrentMarkSweepGeneration::ConcurrentMarkSweepGeneration(
"Offset of FreeChunk::_prev within FreeChunk must match" "Offset of FreeChunk::_prev within FreeChunk must match"
" that of OopDesc::_klass within OopDesc"); " that of OopDesc::_klass within OopDesc");
) )
if (ParallelGCThreads > 0) { if (CollectedHeap::use_parallel_gc_threads()) {
typedef CMSParGCThreadState* CMSParGCThreadStatePtr; typedef CMSParGCThreadState* CMSParGCThreadStatePtr;
_par_gc_thread_states = _par_gc_thread_states =
NEW_C_HEAP_ARRAY(CMSParGCThreadStatePtr, ParallelGCThreads); NEW_C_HEAP_ARRAY(CMSParGCThreadStatePtr, ParallelGCThreads);
@ -540,8 +540,6 @@ CMSCollector::CMSCollector(ConcurrentMarkSweepGeneration* cmsGen,
_is_alive_closure(_span, &_markBitMap), _is_alive_closure(_span, &_markBitMap),
_restart_addr(NULL), _restart_addr(NULL),
_overflow_list(NULL), _overflow_list(NULL),
_preserved_oop_stack(NULL),
_preserved_mark_stack(NULL),
_stats(cmsGen), _stats(cmsGen),
_eden_chunk_array(NULL), // may be set in ctor body _eden_chunk_array(NULL), // may be set in ctor body
_eden_chunk_capacity(0), // -- ditto -- _eden_chunk_capacity(0), // -- ditto --
@ -616,7 +614,7 @@ CMSCollector::CMSCollector(ConcurrentMarkSweepGeneration* cmsGen,
} }
// Support for multi-threaded concurrent phases // Support for multi-threaded concurrent phases
if (ParallelGCThreads > 0 && CMSConcurrentMTEnabled) { if (CollectedHeap::use_parallel_gc_threads() && CMSConcurrentMTEnabled) {
if (FLAG_IS_DEFAULT(ConcGCThreads)) { if (FLAG_IS_DEFAULT(ConcGCThreads)) {
// just for now // just for now
FLAG_SET_DEFAULT(ConcGCThreads, (ParallelGCThreads + 3)/4); FLAG_SET_DEFAULT(ConcGCThreads, (ParallelGCThreads + 3)/4);
@ -628,6 +626,8 @@ CMSCollector::CMSCollector(ConcurrentMarkSweepGeneration* cmsGen,
warning("GC/CMS: _conc_workers allocation failure: " warning("GC/CMS: _conc_workers allocation failure: "
"forcing -CMSConcurrentMTEnabled"); "forcing -CMSConcurrentMTEnabled");
CMSConcurrentMTEnabled = false; CMSConcurrentMTEnabled = false;
} else {
_conc_workers->initialize_workers();
} }
} else { } else {
CMSConcurrentMTEnabled = false; CMSConcurrentMTEnabled = false;
@ -936,7 +936,7 @@ void ConcurrentMarkSweepGeneration::reset_after_compaction() {
// along with all the other pointers into the heap but // along with all the other pointers into the heap but
// compaction is expected to be a rare event with // compaction is expected to be a rare event with
// a heap using cms so don't do it without seeing the need. // a heap using cms so don't do it without seeing the need.
if (ParallelGCThreads > 0) { if (CollectedHeap::use_parallel_gc_threads()) {
for (uint i = 0; i < ParallelGCThreads; i++) { for (uint i = 0; i < ParallelGCThreads; i++) {
_par_gc_thread_states[i]->promo.reset(); _par_gc_thread_states[i]->promo.reset();
} }
@ -2630,7 +2630,8 @@ void CMSCollector::gc_prologue(bool full) {
// Should call gc_prologue_work() for all cms gens we are responsible for // Should call gc_prologue_work() for all cms gens we are responsible for
bool registerClosure = _collectorState >= Marking bool registerClosure = _collectorState >= Marking
&& _collectorState < Sweeping; && _collectorState < Sweeping;
ModUnionClosure* muc = ParallelGCThreads > 0 ? &_modUnionClosurePar ModUnionClosure* muc = CollectedHeap::use_parallel_gc_threads() ?
&_modUnionClosurePar
: &_modUnionClosure; : &_modUnionClosure;
_cmsGen->gc_prologue_work(full, registerClosure, muc); _cmsGen->gc_prologue_work(full, registerClosure, muc);
_permGen->gc_prologue_work(full, registerClosure, muc); _permGen->gc_prologue_work(full, registerClosure, muc);
@ -2731,7 +2732,7 @@ void ConcurrentMarkSweepGeneration::gc_epilogue(bool full) {
collector()->gc_epilogue(full); collector()->gc_epilogue(full);
// Also reset promotion tracking in par gc thread states. // Also reset promotion tracking in par gc thread states.
if (ParallelGCThreads > 0) { if (CollectedHeap::use_parallel_gc_threads()) {
for (uint i = 0; i < ParallelGCThreads; i++) { for (uint i = 0; i < ParallelGCThreads; i++) {
_par_gc_thread_states[i]->promo.stopTrackingPromotions(i); _par_gc_thread_states[i]->promo.stopTrackingPromotions(i);
} }
@ -3263,6 +3264,7 @@ HeapWord*
ConcurrentMarkSweepGeneration::expand_and_allocate(size_t word_size, ConcurrentMarkSweepGeneration::expand_and_allocate(size_t word_size,
bool tlab, bool tlab,
bool parallel) { bool parallel) {
CMSSynchronousYieldRequest yr;
assert(!tlab, "Can't deal with TLAB allocation"); assert(!tlab, "Can't deal with TLAB allocation");
MutexLockerEx x(freelistLock(), Mutex::_no_safepoint_check_flag); MutexLockerEx x(freelistLock(), Mutex::_no_safepoint_check_flag);
expand(word_size*HeapWordSize, MinHeapDeltaBytes, expand(word_size*HeapWordSize, MinHeapDeltaBytes,
@ -3709,35 +3711,42 @@ class CMSConcMarkingTask;
class CMSConcMarkingTerminator: public ParallelTaskTerminator { class CMSConcMarkingTerminator: public ParallelTaskTerminator {
CMSCollector* _collector; CMSCollector* _collector;
CMSConcMarkingTask* _task; CMSConcMarkingTask* _task;
bool _yield;
protected:
virtual void yield();
public: public:
virtual void yield();
// "n_threads" is the number of threads to be terminated. // "n_threads" is the number of threads to be terminated.
// "queue_set" is a set of work queues of other threads. // "queue_set" is a set of work queues of other threads.
// "collector" is the CMS collector associated with this task terminator. // "collector" is the CMS collector associated with this task terminator.
// "yield" indicates whether we need the gang as a whole to yield. // "yield" indicates whether we need the gang as a whole to yield.
CMSConcMarkingTerminator(int n_threads, TaskQueueSetSuper* queue_set, CMSConcMarkingTerminator(int n_threads, TaskQueueSetSuper* queue_set, CMSCollector* collector) :
CMSCollector* collector, bool yield) :
ParallelTaskTerminator(n_threads, queue_set), ParallelTaskTerminator(n_threads, queue_set),
_collector(collector), _collector(collector) { }
_yield(yield) { }
void set_task(CMSConcMarkingTask* task) { void set_task(CMSConcMarkingTask* task) {
_task = task; _task = task;
} }
}; };
class CMSConcMarkingTerminatorTerminator: public TerminatorTerminator {
CMSConcMarkingTask* _task;
public:
bool should_exit_termination();
void set_task(CMSConcMarkingTask* task) {
_task = task;
}
};
// MT Concurrent Marking Task // MT Concurrent Marking Task
class CMSConcMarkingTask: public YieldingFlexibleGangTask { class CMSConcMarkingTask: public YieldingFlexibleGangTask {
CMSCollector* _collector; CMSCollector* _collector;
YieldingFlexibleWorkGang* _workers; // the whole gang
int _n_workers; // requested/desired # workers int _n_workers; // requested/desired # workers
bool _asynch; bool _asynch;
bool _result; bool _result;
CompactibleFreeListSpace* _cms_space; CompactibleFreeListSpace* _cms_space;
CompactibleFreeListSpace* _perm_space; CompactibleFreeListSpace* _perm_space;
HeapWord* _global_finger; char _pad_front[64]; // padding to ...
HeapWord* _global_finger; // ... avoid sharing cache line
char _pad_back[64];
HeapWord* _restart_addr; HeapWord* _restart_addr;
// Exposed here for yielding support // Exposed here for yielding support
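
The _pad_front/_pad_back fields keep the frequently updated _global_finger word on a cache line of its own, so marking workers that advance the finger do not keep invalidating the line holding the task's other fields. A minimal sketch of the same padding idea plus a monotonic CAS update, using standard C++ atomics instead of HotSpot's primitives (the 64-byte line size and the names are assumptions):

#include <atomic>
#include <cstdint>

// Keep a hot shared word away from its neighbours to avoid false sharing.
struct PaddedFinger {
  char _pad_front[64];                          // assumed cache-line size
  std::atomic<std::uintptr_t> _finger{0};       // the hot word, alone on its line
  char _pad_back[64];
};

// Advance the finger monotonically, loosely following what a
// bump_global_finger()-style update has to do under contention.
inline void bump(PaddedFinger& pf, std::uintptr_t f) {
  std::uintptr_t cur = pf._finger.load();
  while (f > cur && !pf._finger.compare_exchange_weak(cur, f)) {
    // cur now holds the latest value; retry only while we are still ahead.
  }
}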
@ -3745,28 +3754,30 @@ class CMSConcMarkingTask: public YieldingFlexibleGangTask {
// The per thread work queues, available here for stealing // The per thread work queues, available here for stealing
OopTaskQueueSet* _task_queues; OopTaskQueueSet* _task_queues;
// Termination (and yielding) support
CMSConcMarkingTerminator _term; CMSConcMarkingTerminator _term;
CMSConcMarkingTerminatorTerminator _term_term;
public: public:
CMSConcMarkingTask(CMSCollector* collector, CMSConcMarkingTask(CMSCollector* collector,
CompactibleFreeListSpace* cms_space, CompactibleFreeListSpace* cms_space,
CompactibleFreeListSpace* perm_space, CompactibleFreeListSpace* perm_space,
bool asynch, int n_workers, bool asynch,
YieldingFlexibleWorkGang* workers, YieldingFlexibleWorkGang* workers,
OopTaskQueueSet* task_queues): OopTaskQueueSet* task_queues):
YieldingFlexibleGangTask("Concurrent marking done multi-threaded"), YieldingFlexibleGangTask("Concurrent marking done multi-threaded"),
_collector(collector), _collector(collector),
_cms_space(cms_space), _cms_space(cms_space),
_perm_space(perm_space), _perm_space(perm_space),
_asynch(asynch), _n_workers(n_workers), _result(true), _asynch(asynch), _n_workers(0), _result(true),
_workers(workers), _task_queues(task_queues), _task_queues(task_queues),
_term(n_workers, task_queues, _collector, asynch), _term(_n_workers, task_queues, _collector),
_bit_map_lock(collector->bitMapLock()) _bit_map_lock(collector->bitMapLock())
{ {
assert(n_workers <= workers->total_workers(), _requested_size = _n_workers;
"Else termination won't work correctly today"); // XXX FIX ME!
_requested_size = n_workers;
_term.set_task(this); _term.set_task(this);
_term_term.set_task(this);
assert(_cms_space->bottom() < _perm_space->bottom(), assert(_cms_space->bottom() < _perm_space->bottom(),
"Finger incorrectly initialized below"); "Finger incorrectly initialized below");
_restart_addr = _global_finger = _cms_space->bottom(); _restart_addr = _global_finger = _cms_space->bottom();
@ -3781,7 +3792,16 @@ class CMSConcMarkingTask: public YieldingFlexibleGangTask {
CMSConcMarkingTerminator* terminator() { return &_term; } CMSConcMarkingTerminator* terminator() { return &_term; }
virtual void set_for_termination(int active_workers) {
terminator()->reset_for_reuse(active_workers);
}
void work(int i); void work(int i);
bool should_yield() {
return ConcurrentMarkSweepThread::should_yield()
&& !_collector->foregroundGCIsActive()
&& _asynch;
}
virtual void coordinator_yield(); // stuff done by coordinator virtual void coordinator_yield(); // stuff done by coordinator
bool result() { return _result; } bool result() { return _result; }
@ -3803,10 +3823,17 @@ class CMSConcMarkingTask: public YieldingFlexibleGangTask {
void bump_global_finger(HeapWord* f); void bump_global_finger(HeapWord* f);
}; };
bool CMSConcMarkingTerminatorTerminator::should_exit_termination() {
assert(_task != NULL, "Error");
return _task->yielding();
// Note that we do not need the disjunct || _task->should_yield() above
// because we want terminating threads to yield only if the task
// is already in the midst of yielding, which happens only after at least one
// thread has yielded.
}
void CMSConcMarkingTerminator::yield() { void CMSConcMarkingTerminator::yield() {
if (ConcurrentMarkSweepThread::should_yield() && if (_task->should_yield()) {
!_collector->foregroundGCIsActive() &&
_yield) {
_task->yield(); _task->yield();
} else { } else {
ParallelTaskTerminator::yield(); ParallelTaskTerminator::yield();
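
should_exit_termination() lets a thread that is spinning in the termination protocol bail out as soon as the task has started yielding, and CMSConcMarkingTerminator::yield() then routes a spinning thread's yield through the gang-wide task yield. A condensed sketch of a termination barrier that takes such an exit predicate (illustrative C++ only; the real ParallelTaskTerminator is considerably more elaborate):

#include <atomic>
#include <functional>
#include <thread>

// Spin until either every worker has offered termination (return true) or the
// exit predicate fires, e.g. because the task as a whole has started yielding
// (return false, so the caller re-enters its work/yield loop).
bool offer_termination(std::atomic<int>& offered, int n_workers,
                       const std::function<bool()>& should_exit_termination) {
  offered.fetch_add(1);
  for (;;) {
    int cur = offered.load();
    if (cur == n_workers) {
      return true;                       // all workers idle: this round is done
    }
    if (should_exit_termination()) {
      // Withdraw the offer, but only if termination has not completed meanwhile.
      if (offered.compare_exchange_strong(cur, cur - 1)) {
        return false;                    // go back to stealing / yielding
      }
      continue;                          // the count moved; re-evaluate
    }
    std::this_thread::yield();
  }
}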
@ -4031,6 +4058,7 @@ void CMSConcMarkingTask::do_scan_and_mark(int i, CompactibleFreeListSpace* sp) {
class Par_ConcMarkingClosure: public Par_KlassRememberingOopClosure { class Par_ConcMarkingClosure: public Par_KlassRememberingOopClosure {
private: private:
CMSConcMarkingTask* _task;
MemRegion _span; MemRegion _span;
CMSBitMap* _bit_map; CMSBitMap* _bit_map;
CMSMarkStack* _overflow_stack; CMSMarkStack* _overflow_stack;
@ -4038,11 +4066,12 @@ class Par_ConcMarkingClosure: public Par_KlassRememberingOopClosure {
protected: protected:
DO_OOP_WORK_DEFN DO_OOP_WORK_DEFN
public: public:
Par_ConcMarkingClosure(CMSCollector* collector, OopTaskQueue* work_queue, Par_ConcMarkingClosure(CMSCollector* collector, CMSConcMarkingTask* task, OopTaskQueue* work_queue,
CMSBitMap* bit_map, CMSMarkStack* overflow_stack, CMSBitMap* bit_map, CMSMarkStack* overflow_stack,
CMSMarkStack* revisit_stack): CMSMarkStack* revisit_stack):
Par_KlassRememberingOopClosure(collector, NULL, revisit_stack), Par_KlassRememberingOopClosure(collector, NULL, revisit_stack),
_span(_collector->_span), _task(task),
_span(collector->_span),
_work_queue(work_queue), _work_queue(work_queue),
_bit_map(bit_map), _bit_map(bit_map),
_overflow_stack(overflow_stack) _overflow_stack(overflow_stack)
@ -4051,6 +4080,11 @@ class Par_ConcMarkingClosure: public Par_KlassRememberingOopClosure {
virtual void do_oop(narrowOop* p); virtual void do_oop(narrowOop* p);
void trim_queue(size_t max); void trim_queue(size_t max);
void handle_stack_overflow(HeapWord* lost); void handle_stack_overflow(HeapWord* lost);
void do_yield_check() {
if (_task->should_yield()) {
_task->yield();
}
}
}; };
// Grey object scanning during work stealing phase -- // Grey object scanning during work stealing phase --
@ -4094,6 +4128,7 @@ void Par_ConcMarkingClosure::do_oop(oop obj) {
handle_stack_overflow(addr); handle_stack_overflow(addr);
} }
} // Else, some other thread got there first } // Else, some other thread got there first
do_yield_check();
} }
} }
@ -4109,6 +4144,7 @@ void Par_ConcMarkingClosure::trim_queue(size_t max) {
assert(_span.contains((HeapWord*)new_oop), "Not in span"); assert(_span.contains((HeapWord*)new_oop), "Not in span");
assert(new_oop->is_parsable(), "Should be parsable"); assert(new_oop->is_parsable(), "Should be parsable");
new_oop->oop_iterate(this); // do_oop() above new_oop->oop_iterate(this); // do_oop() above
do_yield_check();
} }
} }
} }
@ -4136,7 +4172,7 @@ void CMSConcMarkingTask::do_work_steal(int i) {
CMSMarkStack* ovflw = &(_collector->_markStack); CMSMarkStack* ovflw = &(_collector->_markStack);
CMSMarkStack* revisit = &(_collector->_revisitStack); CMSMarkStack* revisit = &(_collector->_revisitStack);
int* seed = _collector->hash_seed(i); int* seed = _collector->hash_seed(i);
Par_ConcMarkingClosure cl(_collector, work_q, bm, ovflw, revisit); Par_ConcMarkingClosure cl(_collector, this, work_q, bm, ovflw, revisit);
while (true) { while (true) {
cl.trim_queue(0); cl.trim_queue(0);
assert(work_q->size() == 0, "Should have been emptied above"); assert(work_q->size() == 0, "Should have been emptied above");
@ -4149,9 +4185,11 @@ void CMSConcMarkingTask::do_work_steal(int i) {
assert(obj_to_scan->is_oop(), "Should be an oop"); assert(obj_to_scan->is_oop(), "Should be an oop");
assert(bm->isMarked((HeapWord*)obj_to_scan), "Grey object"); assert(bm->isMarked((HeapWord*)obj_to_scan), "Grey object");
obj_to_scan->oop_iterate(&cl); obj_to_scan->oop_iterate(&cl);
} else if (terminator()->offer_termination()) { } else if (terminator()->offer_termination(&_term_term)) {
assert(work_q->size() == 0, "Impossible!"); assert(work_q->size() == 0, "Impossible!");
break; break;
} else if (yielding() || should_yield()) {
yield();
} }
} }
} }
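
Putting the pieces together, do_work_steal() drains its own queue, refills from the shared overflow list, steals from other workers, and only offers termination once nothing is left, yielding instead whenever the gang is in a yield. A stripped-down version of that drive loop (illustrative C++; the std::function parameters stand in for the HotSpot task-queue, terminator and gang-yield facilities):

#include <deque>
#include <functional>

// Illustrative work-stealing driver for one marking worker.
void work_steal_loop(std::deque<int>& local_q,
                     const std::function<bool(int&)>& steal_work,
                     const std::function<bool()>&     offer_termination,
                     const std::function<bool()>&     should_yield,
                     const std::function<void()>&     do_yield,
                     const std::function<void(int)>&  process) {
  for (;;) {
    while (!local_q.empty()) {           // drain local work first
      int obj = local_q.back();
      local_q.pop_back();
      process(obj);                      // may push more work onto local_q
    }
    int stolen;
    if (steal_work(stolen)) {
      process(stolen);                   // keep scanning with the stolen item
    } else if (offer_termination()) {
      break;                             // every worker has run out of work
    } else if (should_yield()) {
      do_yield();                        // the whole gang pauses, then resumes
    }
  }
}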
@ -4220,9 +4258,12 @@ bool CMSCollector::do_marking_mt(bool asynch) {
CompactibleFreeListSpace* cms_space = _cmsGen->cmsSpace(); CompactibleFreeListSpace* cms_space = _cmsGen->cmsSpace();
CompactibleFreeListSpace* perm_space = _permGen->cmsSpace(); CompactibleFreeListSpace* perm_space = _permGen->cmsSpace();
CMSConcMarkingTask tsk(this, cms_space, perm_space, CMSConcMarkingTask tsk(this,
asynch, num_workers /* number requested XXX */, cms_space,
conc_workers(), task_queues()); perm_space,
asynch,
conc_workers(),
task_queues());
// Since the actual number of workers we get may be different // Since the actual number of workers we get may be different
// from the number we requested above, do we need to do anything different // from the number we requested above, do we need to do anything different
@ -4326,6 +4367,10 @@ void CMSCollector::preclean() {
verify_overflow_empty(); verify_overflow_empty();
_abort_preclean = false; _abort_preclean = false;
if (CMSPrecleaningEnabled) { if (CMSPrecleaningEnabled) {
// Precleaning is currently not MT but the reference processor
// may be set for MT. Disable it temporarily here.
ReferenceProcessor* rp = ref_processor();
ReferenceProcessorMTProcMutator z(rp, false);
_eden_chunk_index = 0; _eden_chunk_index = 0;
size_t used = get_eden_used(); size_t used = get_eden_used();
size_t capacity = get_eden_capacity(); size_t capacity = get_eden_capacity();
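
ReferenceProcessorMTProcMutator appears to be a scoped helper: it forces the reference processor's MT-processing setting to false while precleaning runs and, presumably, restores the previous value when the scope ends. A generic sketch of that save/override/restore (RAII) pattern in plain C++ (class and field names here are illustrative):

// Illustrative RAII mutator: temporarily override a bool setting and
// restore the original value on scope exit.
class ScopedBoolMutator {
  bool& _field;
  bool  _saved;
 public:
  ScopedBoolMutator(bool& field, bool value) : _field(field), _saved(field) {
    _field = value;
  }
  ~ScopedBoolMutator() { _field = _saved; }
};

// Usage, mirroring the precleaning change above:
//   bool processing_is_mt = true;
//   {
//     ScopedBoolMutator z(processing_is_mt, false);  // precleaning runs single-threaded
//     ...
//   }  // MT processing is re-enabled here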
@ -4918,7 +4963,7 @@ void CMSCollector::checkpointRootsFinalWork(bool asynch,
// dirtied since the first checkpoint in this GC cycle and prior to // dirtied since the first checkpoint in this GC cycle and prior to
// the most recent young generation GC, minus those cleaned up by the // the most recent young generation GC, minus those cleaned up by the
// concurrent precleaning. // concurrent precleaning.
if (CMSParallelRemarkEnabled && ParallelGCThreads > 0) { if (CMSParallelRemarkEnabled && CollectedHeap::use_parallel_gc_threads()) {
TraceTime t("Rescan (parallel) ", PrintGCDetails, false, gclog_or_tty); TraceTime t("Rescan (parallel) ", PrintGCDetails, false, gclog_or_tty);
do_remark_parallel(); do_remark_parallel();
} else { } else {
@ -5012,7 +5057,6 @@ void CMSCollector::checkpointRootsFinalWork(bool asynch,
// Parallel remark task // Parallel remark task
class CMSParRemarkTask: public AbstractGangTask { class CMSParRemarkTask: public AbstractGangTask {
CMSCollector* _collector; CMSCollector* _collector;
WorkGang* _workers;
int _n_workers; int _n_workers;
CompactibleFreeListSpace* _cms_space; CompactibleFreeListSpace* _cms_space;
CompactibleFreeListSpace* _perm_space; CompactibleFreeListSpace* _perm_space;
@ -5025,21 +5069,21 @@ class CMSParRemarkTask: public AbstractGangTask {
CMSParRemarkTask(CMSCollector* collector, CMSParRemarkTask(CMSCollector* collector,
CompactibleFreeListSpace* cms_space, CompactibleFreeListSpace* cms_space,
CompactibleFreeListSpace* perm_space, CompactibleFreeListSpace* perm_space,
int n_workers, WorkGang* workers, int n_workers, FlexibleWorkGang* workers,
OopTaskQueueSet* task_queues): OopTaskQueueSet* task_queues):
AbstractGangTask("Rescan roots and grey objects in parallel"), AbstractGangTask("Rescan roots and grey objects in parallel"),
_collector(collector), _collector(collector),
_cms_space(cms_space), _perm_space(perm_space), _cms_space(cms_space), _perm_space(perm_space),
_n_workers(n_workers), _n_workers(n_workers),
_workers(workers),
_task_queues(task_queues), _task_queues(task_queues),
_term(workers->total_workers(), task_queues) { } _term(n_workers, task_queues) { }
OopTaskQueueSet* task_queues() { return _task_queues; } OopTaskQueueSet* task_queues() { return _task_queues; }
OopTaskQueue* work_queue(int i) { return task_queues()->queue(i); } OopTaskQueue* work_queue(int i) { return task_queues()->queue(i); }
ParallelTaskTerminator* terminator() { return &_term; } ParallelTaskTerminator* terminator() { return &_term; }
int n_workers() { return _n_workers; }
void work(int i); void work(int i);
@ -5057,6 +5101,11 @@ class CMSParRemarkTask: public AbstractGangTask {
void do_work_steal(int i, Par_MarkRefsIntoAndScanClosure* cl, int* seed); void do_work_steal(int i, Par_MarkRefsIntoAndScanClosure* cl, int* seed);
}; };
// work_queue(i) is passed to the closure
// Par_MarkRefsIntoAndScanClosure. The "i" parameter
// also is passed to do_dirty_card_rescan_tasks() and to
// do_work_steal() to select the i-th task_queue.
void CMSParRemarkTask::work(int i) { void CMSParRemarkTask::work(int i) {
elapsedTimer _timer; elapsedTimer _timer;
ResourceMark rm; ResourceMark rm;
@ -5128,6 +5177,7 @@ void CMSParRemarkTask::work(int i) {
// Do the rescan tasks for each of the two spaces // Do the rescan tasks for each of the two spaces
// (cms_space and perm_space) in turn. // (cms_space and perm_space) in turn.
// "i" is passed to select the "i-th" task_queue
do_dirty_card_rescan_tasks(_cms_space, i, &par_mrias_cl); do_dirty_card_rescan_tasks(_cms_space, i, &par_mrias_cl);
do_dirty_card_rescan_tasks(_perm_space, i, &par_mrias_cl); do_dirty_card_rescan_tasks(_perm_space, i, &par_mrias_cl);
_timer.stop(); _timer.stop();
@ -5150,6 +5200,7 @@ void CMSParRemarkTask::work(int i) {
} }
} }
// Note that parameter "i" is not used.
void void
CMSParRemarkTask::do_young_space_rescan(int i, CMSParRemarkTask::do_young_space_rescan(int i,
Par_MarkRefsIntoAndScanClosure* cl, ContiguousSpace* space, Par_MarkRefsIntoAndScanClosure* cl, ContiguousSpace* space,
@ -5309,8 +5360,13 @@ CMSParRemarkTask::do_work_steal(int i, Par_MarkRefsIntoAndScanClosure* cl,
size_t num_from_overflow_list = MIN2((size_t)(work_q->max_elems() - work_q->size())/4, size_t num_from_overflow_list = MIN2((size_t)(work_q->max_elems() - work_q->size())/4,
(size_t)ParGCDesiredObjsFromOverflowList); (size_t)ParGCDesiredObjsFromOverflowList);
// Now check if there's any work in the overflow list // Now check if there's any work in the overflow list
// Passing ParallelGCThreads as the third parameter, no_of_gc_threads,
// only affects the number of attempts made to get work from the
// overflow list and does not affect the number of workers. Just
// pass ParallelGCThreads so this behavior is unchanged.
if (_collector->par_take_from_overflow_list(num_from_overflow_list, if (_collector->par_take_from_overflow_list(num_from_overflow_list,
work_q)) { work_q,
ParallelGCThreads)) {
// found something in global overflow list; // found something in global overflow list;
// not yet ready to go stealing work from others. // not yet ready to go stealing work from others.
// We'd like to assert(work_q->size() != 0, ...) // We'd like to assert(work_q->size() != 0, ...)
@ -5367,11 +5423,12 @@ void CMSCollector::reset_survivor_plab_arrays() {
// Merge the per-thread plab arrays into the global survivor chunk // Merge the per-thread plab arrays into the global survivor chunk
// array which will provide the partitioning of the survivor space // array which will provide the partitioning of the survivor space
// for CMS rescan. // for CMS rescan.
void CMSCollector::merge_survivor_plab_arrays(ContiguousSpace* surv) { void CMSCollector::merge_survivor_plab_arrays(ContiguousSpace* surv,
int no_of_gc_threads) {
assert(_survivor_plab_array != NULL, "Error"); assert(_survivor_plab_array != NULL, "Error");
assert(_survivor_chunk_array != NULL, "Error"); assert(_survivor_chunk_array != NULL, "Error");
assert(_collectorState == FinalMarking, "Error"); assert(_collectorState == FinalMarking, "Error");
for (uint j = 0; j < ParallelGCThreads; j++) { for (int j = 0; j < no_of_gc_threads; j++) {
_cursor[j] = 0; _cursor[j] = 0;
} }
HeapWord* top = surv->top(); HeapWord* top = surv->top();
@ -5379,7 +5436,7 @@ void CMSCollector::merge_survivor_plab_arrays(ContiguousSpace* surv) {
for (i = 0; i < _survivor_chunk_capacity; i++) { // all sca entries for (i = 0; i < _survivor_chunk_capacity; i++) { // all sca entries
HeapWord* min_val = top; // Higher than any PLAB address HeapWord* min_val = top; // Higher than any PLAB address
uint min_tid = 0; // position of min_val this round uint min_tid = 0; // position of min_val this round
for (uint j = 0; j < ParallelGCThreads; j++) { for (int j = 0; j < no_of_gc_threads; j++) {
ChunkArray* cur_sca = &_survivor_plab_array[j]; ChunkArray* cur_sca = &_survivor_plab_array[j];
if (_cursor[j] == cur_sca->end()) { if (_cursor[j] == cur_sca->end()) {
continue; continue;
@ -5413,7 +5470,7 @@ void CMSCollector::merge_survivor_plab_arrays(ContiguousSpace* surv) {
// Verify that we used up all the recorded entries // Verify that we used up all the recorded entries
#ifdef ASSERT #ifdef ASSERT
size_t total = 0; size_t total = 0;
for (uint j = 0; j < ParallelGCThreads; j++) { for (int j = 0; j < no_of_gc_threads; j++) {
assert(_cursor[j] == _survivor_plab_array[j].end(), "Ctl pt invariant"); assert(_cursor[j] == _survivor_plab_array[j].end(), "Ctl pt invariant");
total += _cursor[j]; total += _cursor[j];
} }
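
merge_survivor_plab_arrays() is effectively a k-way merge: each GC worker's PLAB sample array is consumed through its own cursor, and every round the smallest remaining entry across all workers is appended to the global survivor chunk array. A plain C++ sketch of the same merge over k sorted vectors (illustrative types; addresses are modelled as size_t):

#include <vector>
#include <cstddef>

// Merge k address-sorted per-thread sample arrays into one sorted array,
// mirroring the cursor-per-thread scan in merge_survivor_plab_arrays().
std::vector<std::size_t> k_way_merge(const std::vector<std::vector<std::size_t>>& per_thread) {
  std::vector<std::size_t> cursor(per_thread.size(), 0);
  std::vector<std::size_t> merged;
  for (;;) {
    const std::size_t NONE = static_cast<std::size_t>(-1);
    std::size_t min_val = NONE;
    std::size_t min_tid = 0;
    for (std::size_t j = 0; j < per_thread.size(); ++j) {
      if (cursor[j] == per_thread[j].size()) continue;   // this thread's samples are used up
      if (per_thread[j][cursor[j]] < min_val) {
        min_val = per_thread[j][cursor[j]];
        min_tid = j;
      }
    }
    if (min_val == NONE) break;          // all cursors exhausted
    merged.push_back(min_val);
    ++cursor[min_tid];                   // advance the winning thread's cursor
  }
  return merged;
}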
@ -5448,13 +5505,15 @@ initialize_sequential_subtasks_for_young_gen_rescan(int n_threads) {
// Each valid entry in [0, _eden_chunk_index) represents a task. // Each valid entry in [0, _eden_chunk_index) represents a task.
size_t n_tasks = _eden_chunk_index + 1; size_t n_tasks = _eden_chunk_index + 1;
assert(n_tasks == 1 || _eden_chunk_array != NULL, "Error"); assert(n_tasks == 1 || _eden_chunk_array != NULL, "Error");
pst->set_par_threads(n_threads); // Sets the condition for completion of the subtask (how many threads
// need to finish in order to be done).
pst->set_n_threads(n_threads);
pst->set_n_tasks((int)n_tasks); pst->set_n_tasks((int)n_tasks);
} }
// Merge the survivor plab arrays into _survivor_chunk_array // Merge the survivor plab arrays into _survivor_chunk_array
if (_survivor_plab_array != NULL) { if (_survivor_plab_array != NULL) {
merge_survivor_plab_arrays(dng->from()); merge_survivor_plab_arrays(dng->from(), n_threads);
} else { } else {
assert(_survivor_chunk_index == 0, "Error"); assert(_survivor_chunk_index == 0, "Error");
} }
@ -5463,7 +5522,9 @@ initialize_sequential_subtasks_for_young_gen_rescan(int n_threads) {
{ {
SequentialSubTasksDone* pst = dng->to()->par_seq_tasks(); SequentialSubTasksDone* pst = dng->to()->par_seq_tasks();
assert(!pst->valid(), "Clobbering existing data?"); assert(!pst->valid(), "Clobbering existing data?");
pst->set_par_threads(n_threads); // Sets the condition for completion of the subtask (how many threads
// need to finish in order to be done).
pst->set_n_threads(n_threads);
pst->set_n_tasks(1); pst->set_n_tasks(1);
assert(pst->valid(), "Error"); assert(pst->valid(), "Error");
} }
@ -5474,7 +5535,9 @@ initialize_sequential_subtasks_for_young_gen_rescan(int n_threads) {
assert(!pst->valid(), "Clobbering existing data?"); assert(!pst->valid(), "Clobbering existing data?");
size_t n_tasks = _survivor_chunk_index + 1; size_t n_tasks = _survivor_chunk_index + 1;
assert(n_tasks == 1 || _survivor_chunk_array != NULL, "Error"); assert(n_tasks == 1 || _survivor_chunk_array != NULL, "Error");
pst->set_par_threads(n_threads); // Sets the condition for completion of the subtask (how many threads
// need to finish in order to be done).
pst->set_n_threads(n_threads);
pst->set_n_tasks((int)n_tasks); pst->set_n_tasks((int)n_tasks);
assert(pst->valid(), "Error"); assert(pst->valid(), "Error");
} }
@ -5483,7 +5546,7 @@ initialize_sequential_subtasks_for_young_gen_rescan(int n_threads) {
// Parallel version of remark // Parallel version of remark
void CMSCollector::do_remark_parallel() { void CMSCollector::do_remark_parallel() {
GenCollectedHeap* gch = GenCollectedHeap::heap(); GenCollectedHeap* gch = GenCollectedHeap::heap();
WorkGang* workers = gch->workers(); FlexibleWorkGang* workers = gch->workers();
assert(workers != NULL, "Need parallel worker threads."); assert(workers != NULL, "Need parallel worker threads.");
int n_workers = workers->total_workers(); int n_workers = workers->total_workers();
CompactibleFreeListSpace* cms_space = _cmsGen->cmsSpace(); CompactibleFreeListSpace* cms_space = _cmsGen->cmsSpace();
@ -5636,13 +5699,11 @@ void CMSCollector::do_remark_non_parallel() {
//////////////////////////////////////////////////////// ////////////////////////////////////////////////////////
// Parallel Reference Processing Task Proxy Class // Parallel Reference Processing Task Proxy Class
//////////////////////////////////////////////////////// ////////////////////////////////////////////////////////
class CMSRefProcTaskProxy: public AbstractGangTask { class CMSRefProcTaskProxy: public AbstractGangTaskWOopQueues {
typedef AbstractRefProcTaskExecutor::ProcessTask ProcessTask; typedef AbstractRefProcTaskExecutor::ProcessTask ProcessTask;
CMSCollector* _collector; CMSCollector* _collector;
CMSBitMap* _mark_bit_map; CMSBitMap* _mark_bit_map;
const MemRegion _span; const MemRegion _span;
OopTaskQueueSet* _task_queues;
ParallelTaskTerminator _term;
ProcessTask& _task; ProcessTask& _task;
public: public:
@ -5650,24 +5711,21 @@ public:
CMSCollector* collector, CMSCollector* collector,
const MemRegion& span, const MemRegion& span,
CMSBitMap* mark_bit_map, CMSBitMap* mark_bit_map,
int total_workers, AbstractWorkGang* workers,
OopTaskQueueSet* task_queues): OopTaskQueueSet* task_queues):
AbstractGangTask("Process referents by policy in parallel"), AbstractGangTaskWOopQueues("Process referents by policy in parallel",
task_queues),
_task(task), _task(task),
_collector(collector), _span(span), _mark_bit_map(mark_bit_map), _collector(collector), _span(span), _mark_bit_map(mark_bit_map)
_task_queues(task_queues),
_term(total_workers, task_queues)
{ {
assert(_collector->_span.equals(_span) && !_span.is_empty(), assert(_collector->_span.equals(_span) && !_span.is_empty(),
"Inconsistency in _span"); "Inconsistency in _span");
} }
OopTaskQueueSet* task_queues() { return _task_queues; } OopTaskQueueSet* task_queues() { return queues(); }
OopTaskQueue* work_queue(int i) { return task_queues()->queue(i); } OopTaskQueue* work_queue(int i) { return task_queues()->queue(i); }
ParallelTaskTerminator* terminator() { return &_term; }
void do_work_steal(int i, void do_work_steal(int i,
CMSParDrainMarkingStackClosure* drain, CMSParDrainMarkingStackClosure* drain,
CMSParKeepAliveClosure* keep_alive, CMSParKeepAliveClosure* keep_alive,
@ -5739,8 +5797,13 @@ void CMSRefProcTaskProxy::do_work_steal(int i,
size_t num_from_overflow_list = MIN2((size_t)(work_q->max_elems() - work_q->size())/4, size_t num_from_overflow_list = MIN2((size_t)(work_q->max_elems() - work_q->size())/4,
(size_t)ParGCDesiredObjsFromOverflowList); (size_t)ParGCDesiredObjsFromOverflowList);
// Now check if there's any work in the overflow list // Now check if there's any work in the overflow list
// Passing ParallelGCThreads as the third parameter, no_of_gc_threads,
// only affects the number of attempts made to get work from the
// overflow list and does not affect the number of workers. Just
// pass ParallelGCThreads so this behavior is unchanged.
if (_collector->par_take_from_overflow_list(num_from_overflow_list, if (_collector->par_take_from_overflow_list(num_from_overflow_list,
work_q)) { work_q,
ParallelGCThreads)) {
// Found something in global overflow list; // Found something in global overflow list;
// not yet ready to go stealing work from others. // not yet ready to go stealing work from others.
// We'd like to assert(work_q->size() != 0, ...) // We'd like to assert(work_q->size() != 0, ...)
@ -5773,13 +5836,12 @@ void CMSRefProcTaskProxy::do_work_steal(int i,
void CMSRefProcTaskExecutor::execute(ProcessTask& task) void CMSRefProcTaskExecutor::execute(ProcessTask& task)
{ {
GenCollectedHeap* gch = GenCollectedHeap::heap(); GenCollectedHeap* gch = GenCollectedHeap::heap();
WorkGang* workers = gch->workers(); FlexibleWorkGang* workers = gch->workers();
assert(workers != NULL, "Need parallel worker threads."); assert(workers != NULL, "Need parallel worker threads.");
int n_workers = workers->total_workers();
CMSRefProcTaskProxy rp_task(task, &_collector, CMSRefProcTaskProxy rp_task(task, &_collector,
_collector.ref_processor()->span(), _collector.ref_processor()->span(),
_collector.markBitMap(), _collector.markBitMap(),
n_workers, _collector.task_queues()); workers, _collector.task_queues());
workers->run_task(&rp_task); workers->run_task(&rp_task);
} }
@ -5787,7 +5849,7 @@ void CMSRefProcTaskExecutor::execute(EnqueueTask& task)
{ {
GenCollectedHeap* gch = GenCollectedHeap::heap(); GenCollectedHeap* gch = GenCollectedHeap::heap();
WorkGang* workers = gch->workers(); FlexibleWorkGang* workers = gch->workers();
assert(workers != NULL, "Need parallel worker threads."); assert(workers != NULL, "Need parallel worker threads.");
CMSRefEnqueueTaskProxy enq_task(task); CMSRefEnqueueTaskProxy enq_task(task);
workers->run_task(&enq_task); workers->run_task(&enq_task);
@ -5814,6 +5876,14 @@ void CMSCollector::refProcessingWork(bool asynch, bool clear_all_soft_refs) {
{ {
TraceTime t("weak refs processing", PrintGCDetails, false, gclog_or_tty); TraceTime t("weak refs processing", PrintGCDetails, false, gclog_or_tty);
if (rp->processing_is_mt()) { if (rp->processing_is_mt()) {
// Set the degree of MT here. If the discovery is done MT, there
// may have been a different number of threads doing the discovery
// and a different number of discovered lists may have Ref objects.
// That is OK as long as the Reference lists are balanced (see
// balance_all_queues() and balance_queues()).
rp->set_mt_degree(ParallelGCThreads);
CMSRefProcTaskExecutor task_executor(*this); CMSRefProcTaskExecutor task_executor(*this);
rp->process_discovered_references(&_is_alive_closure, rp->process_discovered_references(&_is_alive_closure,
&cmsKeepAliveClosure, &cmsKeepAliveClosure,
@ -5874,6 +5944,7 @@ void CMSCollector::refProcessingWork(bool asynch, bool clear_all_soft_refs) {
rp->set_enqueuing_is_done(true); rp->set_enqueuing_is_done(true);
if (rp->processing_is_mt()) { if (rp->processing_is_mt()) {
rp->balance_all_queues();
CMSRefProcTaskExecutor task_executor(*this); CMSRefProcTaskExecutor task_executor(*this);
rp->enqueue_discovered_references(&task_executor); rp->enqueue_discovered_references(&task_executor);
} else { } else {
@ -8708,7 +8779,8 @@ bool CMSCollector::take_from_overflow_list(size_t num, CMSMarkStack* stack) {
// similar changes might be needed. // similar changes might be needed.
// CR 6797058 has been filed to consolidate the common code. // CR 6797058 has been filed to consolidate the common code.
bool CMSCollector::par_take_from_overflow_list(size_t num, bool CMSCollector::par_take_from_overflow_list(size_t num,
OopTaskQueue* work_q) { OopTaskQueue* work_q,
int no_of_gc_threads) {
assert(work_q->size() == 0, "First empty local work queue"); assert(work_q->size() == 0, "First empty local work queue");
assert(num < work_q->max_elems(), "Can't bite more than we can chew"); assert(num < work_q->max_elems(), "Can't bite more than we can chew");
if (_overflow_list == NULL) { if (_overflow_list == NULL) {
@ -8717,7 +8789,9 @@ bool CMSCollector::par_take_from_overflow_list(size_t num,
// Grab the entire list; we'll put back a suffix // Grab the entire list; we'll put back a suffix
oop prefix = (oop)Atomic::xchg_ptr(BUSY, &_overflow_list); oop prefix = (oop)Atomic::xchg_ptr(BUSY, &_overflow_list);
Thread* tid = Thread::current(); Thread* tid = Thread::current();
size_t CMSOverflowSpinCount = (size_t)ParallelGCThreads; // Before "no_of_gc_threads" was introduced CMSOverflowSpinCount was
// set to ParallelGCThreads.
size_t CMSOverflowSpinCount = (size_t) no_of_gc_threads; // was ParallelGCThreads;
size_t sleep_time_millis = MAX2((size_t)1, num/100); size_t sleep_time_millis = MAX2((size_t)1, num/100);
// If the list is busy, we spin for a short while, // If the list is busy, we spin for a short while,
// sleeping between attempts to get the list. // sleeping between attempts to get the list.
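
The overflow-list take shown above claims the whole list by atomically exchanging a BUSY sentinel into the list head and, if another thread already holds it, retries a bounded number of times with a short sleep in between; no_of_gc_threads now only sizes that retry budget. A minimal sketch of the claim-with-sentinel step in standard C++ (the node type and back-off interval are assumptions):

#include <atomic>
#include <chrono>
#include <thread>

struct Node { Node* next; };

// Sentinel value that can never be a real node address.
static Node* const BUSY = reinterpret_cast<Node*>(0x1);

// Try to claim the entire list. On success *prefix receives the old head (which
// may be null) and the caller must later store the unused suffix, or nullptr,
// back into head, which also clears the BUSY marker. Returns false if the list
// stayed BUSY for the whole retry budget.
bool claim_overflow_list(std::atomic<Node*>& head, int spin_count, Node** prefix) {
  for (int attempt = 0; attempt < spin_count; ++attempt) {
    Node* p = head.exchange(BUSY);        // grab whatever is there
    if (p != BUSY) {
      *prefix = p;                        // we now own the whole list
      return true;
    }
    // Another thread holds the list; back off briefly and try again.
    std::this_thread::sleep_for(std::chrono::milliseconds(1));
  }
  return false;                           // give up; caller falls back to stealing
}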
@ -8867,23 +8941,10 @@ void CMSCollector::par_push_on_overflow_list(oop p) {
// failures where possible, thus, incrementally hardening the VM // failures where possible, thus, incrementally hardening the VM
// in such low resource situations. // in such low resource situations.
void CMSCollector::preserve_mark_work(oop p, markOop m) { void CMSCollector::preserve_mark_work(oop p, markOop m) {
if (_preserved_oop_stack == NULL) { _preserved_oop_stack.push(p);
assert(_preserved_mark_stack == NULL, _preserved_mark_stack.push(m);
"bijection with preserved_oop_stack");
// Allocate the stacks
_preserved_oop_stack = new (ResourceObj::C_HEAP)
GrowableArray<oop>(PreserveMarkStackSize, true);
_preserved_mark_stack = new (ResourceObj::C_HEAP)
GrowableArray<markOop>(PreserveMarkStackSize, true);
if (_preserved_oop_stack == NULL || _preserved_mark_stack == NULL) {
vm_exit_out_of_memory(2* PreserveMarkStackSize * sizeof(oop) /* punt */,
"Preserved Mark/Oop Stack for CMS (C-heap)");
}
}
_preserved_oop_stack->push(p);
_preserved_mark_stack->push(m);
assert(m == p->mark(), "Mark word changed"); assert(m == p->mark(), "Mark word changed");
assert(_preserved_oop_stack->length() == _preserved_mark_stack->length(), assert(_preserved_oop_stack.size() == _preserved_mark_stack.size(),
"bijection"); "bijection");
} }
@ -8925,42 +8986,30 @@ void CMSCollector::par_preserve_mark_if_necessary(oop p) {
// effect on performance so great that this will // effect on performance so great that this will
// likely just be in the noise anyway. // likely just be in the noise anyway.
void CMSCollector::restore_preserved_marks_if_any() { void CMSCollector::restore_preserved_marks_if_any() {
if (_preserved_oop_stack == NULL) {
assert(_preserved_mark_stack == NULL,
"bijection with preserved_oop_stack");
return;
}
assert(SafepointSynchronize::is_at_safepoint(), assert(SafepointSynchronize::is_at_safepoint(),
"world should be stopped"); "world should be stopped");
assert(Thread::current()->is_ConcurrentGC_thread() || assert(Thread::current()->is_ConcurrentGC_thread() ||
Thread::current()->is_VM_thread(), Thread::current()->is_VM_thread(),
"should be single-threaded"); "should be single-threaded");
assert(_preserved_oop_stack.size() == _preserved_mark_stack.size(),
"bijection");
int length = _preserved_oop_stack->length(); while (!_preserved_oop_stack.is_empty()) {
assert(_preserved_mark_stack->length() == length, "bijection"); oop p = _preserved_oop_stack.pop();
for (int i = 0; i < length; i++) {
oop p = _preserved_oop_stack->at(i);
assert(p->is_oop(), "Should be an oop"); assert(p->is_oop(), "Should be an oop");
assert(_span.contains(p), "oop should be in _span"); assert(_span.contains(p), "oop should be in _span");
assert(p->mark() == markOopDesc::prototype(), assert(p->mark() == markOopDesc::prototype(),
"Set when taken from overflow list"); "Set when taken from overflow list");
markOop m = _preserved_mark_stack->at(i); markOop m = _preserved_mark_stack.pop();
p->set_mark(m); p->set_mark(m);
} }
_preserved_mark_stack->clear(); assert(_preserved_mark_stack.is_empty() && _preserved_oop_stack.is_empty(),
_preserved_oop_stack->clear();
assert(_preserved_mark_stack->is_empty() &&
_preserved_oop_stack->is_empty(),
"stacks were cleared above"); "stacks were cleared above");
} }
#ifndef PRODUCT #ifndef PRODUCT
bool CMSCollector::no_preserved_marks() const { bool CMSCollector::no_preserved_marks() const {
return ( ( _preserved_mark_stack == NULL return _preserved_mark_stack.is_empty() && _preserved_oop_stack.is_empty();
&& _preserved_oop_stack == NULL)
|| ( _preserved_mark_stack->is_empty()
&& _preserved_oop_stack->is_empty()));
} }
#endif #endif
@ -9256,4 +9305,3 @@ TraceCMSMemoryManagerStats::TraceCMSMemoryManagerStats(): TraceMemoryManagerStat
true /* recordGCEndTime */, true /* recordGCEndTime */,
true /* countCollection */ ); true /* countCollection */ );
} }

View File

@ -537,8 +537,8 @@ class CMSCollector: public CHeapObj {
// The following array-pair keeps track of mark words // The following array-pair keeps track of mark words
// displaced for accommodating overflow list above. // displaced for accommodating overflow list above.
// This code will likely be revisited under RFE#4922830. // This code will likely be revisited under RFE#4922830.
GrowableArray<oop>* _preserved_oop_stack; Stack<oop> _preserved_oop_stack;
GrowableArray<markOop>* _preserved_mark_stack; Stack<markOop> _preserved_mark_stack;
int* _hash_seed; int* _hash_seed;
@ -729,7 +729,9 @@ class CMSCollector: public CHeapObj {
// Support for marking stack overflow handling // Support for marking stack overflow handling
bool take_from_overflow_list(size_t num, CMSMarkStack* to_stack); bool take_from_overflow_list(size_t num, CMSMarkStack* to_stack);
bool par_take_from_overflow_list(size_t num, OopTaskQueue* to_work_q); bool par_take_from_overflow_list(size_t num,
OopTaskQueue* to_work_q,
int no_of_gc_threads);
void push_on_overflow_list(oop p); void push_on_overflow_list(oop p);
void par_push_on_overflow_list(oop p); void par_push_on_overflow_list(oop p);
// the following is, obviously, not, in general, "MT-stable" // the following is, obviously, not, in general, "MT-stable"
@ -768,7 +770,7 @@ class CMSCollector: public CHeapObj {
void abortable_preclean(); // Preclean while looking for possible abort void abortable_preclean(); // Preclean while looking for possible abort
void initialize_sequential_subtasks_for_young_gen_rescan(int i); void initialize_sequential_subtasks_for_young_gen_rescan(int i);
// Helper function for above; merge-sorts the per-thread plab samples // Helper function for above; merge-sorts the per-thread plab samples
void merge_survivor_plab_arrays(ContiguousSpace* surv); void merge_survivor_plab_arrays(ContiguousSpace* surv, int no_of_gc_threads);
// Resets (i.e. clears) the per-thread plab sample vectors // Resets (i.e. clears) the per-thread plab sample vectors
void reset_survivor_plab_arrays(); void reset_survivor_plab_arrays();

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2001, 2006, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -123,24 +123,44 @@ class ConcurrentMarkSweepThread: public ConcurrentGCThread {
// or given timeout, whichever is earlier. // or given timeout, whichever is earlier.
void wait_on_cms_lock(long t); // milliseconds void wait_on_cms_lock(long t); // milliseconds
// The CMS thread will yield during the work portion of it's cycle // The CMS thread will yield during the work portion of its cycle
// only when requested to. Both synchronous and asynchronous requests // only when requested to. Both synchronous and asynchronous requests
// are provided. A synchronous request is used for young gen // are provided:
// collections and direct allocations. The requesting thread increments // (1) A synchronous request is used for young gen collections and
// pending_yields at the beginning of an operation, and decrements it when // for direct allocations. The requesting thread increments
// the operation is completed. The CMS thread yields when pending_yields // _pending_yields at the beginning of an operation, and decrements
// is positive. An asynchronous request is used by iCMS in the stop_icms() // _pending_yields when that operation is completed.
// operation. A single yield satisfies the outstanding asynch yield requests. // In turn, the CMS thread yields when _pending_yields is positive,
// The requesting thread increments both pending_yields and pending_decrements. // and continues to yield until the value reverts to 0.
// After yielding, the CMS thread decrements both by the amount in // (2) An asynchronous request, on the other hand, is used by iCMS
// pending_decrements. // for the stop_icms() operation. A single yield satisfies all of
// the outstanding asynch yield requests, of which there may
// occasionally be several in close succession. To accomplish
// this, an asynch-requesting thread atomically increments both
// _pending_yields and _pending_decrements. Unlike a synchronous
// requester, an asynch-requesting thread does not wait for the
// operation to complete and "acknowledge" it by deregistering the
// request. In turn, after yielding, the CMS thread decrements both
// _pending_yields and _pending_decrements by the value seen in
// _pending_decrements before the decrement.
// NOTE: The above scheme is isomorphic to having two request counters,
// one for async requests and one for sync requests, with the CMS thread
// checking the sum of the two counters to decide whether it should yield
// and clearing only the async counter when it yields. However, it turns
// out to be more efficient for CMS code to just check a single counter,
// _pending_yields, that holds the sum (of both sync and async requests),
// plus a second counter, _pending_decrements, that holds only the async
// requests, since in a typical CMS run there are many more potential
// (i.e. static) yield points than there are actual (i.e. dynamic) yields
// caused by requests, which are few and far between.
//
// Note that, while "_pending_yields >= _pending_decrements" is an invariant, // Note that, while "_pending_yields >= _pending_decrements" is an invariant,
// we cannot easily test that invariant, since the counters are manipulated via // we cannot easily test that invariant, since the counters are manipulated via
// atomic instructions without explicit locking and we cannot read // atomic instructions without explicit locking and we cannot read
// the two counters atomically together: one suggestion is to // the two counters atomically together: one suggestion is to
// use (for example) 16-bit counters so as to be able to read the // use (for example) 16-bit counters so as to be able to read the
// two counters atomically even on 32-bit platforms. Notice that // two counters atomically even on 32-bit platforms. Notice that
// the second assert in acknowledge_yield_request() does indeed // the second assert in acknowledge_yield_request() below does indeed
// check a form of the above invariant, albeit indirectly. // check a form of the above invariant, albeit indirectly.
static void increment_pending_yields() { static void increment_pending_yields() {
@ -152,6 +172,7 @@ class ConcurrentMarkSweepThread: public ConcurrentGCThread {
assert(_pending_yields >= 0, "can't be negative"); assert(_pending_yields >= 0, "can't be negative");
} }
static void asynchronous_yield_request() { static void asynchronous_yield_request() {
assert(CMSIncrementalMode, "Currently only used w/iCMS");
increment_pending_yields(); increment_pending_yields();
Atomic::inc(&_pending_decrements); Atomic::inc(&_pending_decrements);
assert(_pending_decrements >= 0, "can't be negative"); assert(_pending_decrements >= 0, "can't be negative");
@ -159,6 +180,7 @@ class ConcurrentMarkSweepThread: public ConcurrentGCThread {
static void acknowledge_yield_request() { static void acknowledge_yield_request() {
jint decrement = _pending_decrements; jint decrement = _pending_decrements;
if (decrement > 0) { if (decrement > 0) {
assert(CMSIncrementalMode, "Currently only used w/iCMS");
// Order important to preserve: _pending_yields >= _pending_decrements // Order important to preserve: _pending_yields >= _pending_decrements
Atomic::add(-decrement, &_pending_decrements); Atomic::add(-decrement, &_pending_decrements);
Atomic::add(-decrement, &_pending_yields); Atomic::add(-decrement, &_pending_yields);
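
The comment block above boils down to two counters: _pending_yields holds all outstanding requests (synchronous plus asynchronous) and _pending_decrements holds only the asynchronous ones, so a single yield can acknowledge every pending asynchronous request at once. A compact sketch of that protocol with std::atomic (illustrative C++, not the ConcurrentMarkSweepThread interface):

#include <atomic>

// Illustrative two-counter yield-request bookkeeping.
struct YieldRequests {
  std::atomic<int> pending_yields{0};      // sync + async requests outstanding
  std::atomic<int> pending_decrements{0};  // async requests only

  // Synchronous requester: register before the operation, deregister after.
  void sync_begin() { pending_yields.fetch_add(1); }
  void sync_end()   { pending_yields.fetch_sub(1); }

  // Asynchronous requester (e.g. iCMS stop_icms()): bump both counters and return.
  void async_request() {
    pending_yields.fetch_add(1);
    pending_decrements.fetch_add(1);
  }

  // CMS thread: yield while any request is outstanding; one yield acknowledges
  // (clears) every async request seen at that point.
  bool should_yield() const { return pending_yields.load() > 0; }
  void acknowledge_yield() {
    int dec = pending_decrements.load();
    if (dec > 0) {
      // Decrement pending_decrements first so that, at every instant,
      // pending_yields >= pending_decrements continues to hold.
      pending_decrements.fetch_sub(dec);
      pending_yields.fetch_sub(dec);
    }
  }
};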
@ -195,7 +217,7 @@ inline void ConcurrentMarkSweepThread::trace_state(const char* desc) {
} }
} }
// For scoped increment/decrement of yield requests // For scoped increment/decrement of (synchronous) yield requests
class CMSSynchronousYieldRequest: public StackObj { class CMSSynchronousYieldRequest: public StackObj {
public: public:
CMSSynchronousYieldRequest() { CMSSynchronousYieldRequest() {

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2001, 2009, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -278,15 +278,16 @@ CMRegionStack::~CMRegionStack() {
if (_base != NULL) FREE_C_HEAP_ARRAY(oop, _base); if (_base != NULL) FREE_C_HEAP_ARRAY(oop, _base);
} }
void CMRegionStack::push(MemRegion mr) { void CMRegionStack::push_lock_free(MemRegion mr) {
assert(mr.word_size() > 0, "Precondition"); assert(mr.word_size() > 0, "Precondition");
while (true) { while (true) {
if (isFull()) { jint index = _index;
if (index >= _capacity) {
_overflow = true; _overflow = true;
return; return;
} }
// Otherwise... // Otherwise...
jint index = _index;
jint next_index = index+1; jint next_index = index+1;
jint res = Atomic::cmpxchg(next_index, &_index, index); jint res = Atomic::cmpxchg(next_index, &_index, index);
if (res == index) { if (res == index) {
@ -297,19 +298,17 @@ void CMRegionStack::push(MemRegion mr) {
} }
} }
// Currently we do not call this at all. Normally we would call it // Lock-free pop of the region stack. Called during the concurrent
// during the concurrent marking / remark phases but we now call // marking / remark phases. Should only be called in tandem with
// the lock-based version instead. But we might want to resurrect this // other lock-free pops.
// code in the future. So, we'll leave it here commented out. MemRegion CMRegionStack::pop_lock_free() {
#if 0
MemRegion CMRegionStack::pop() {
while (true) { while (true) {
// Otherwise...
jint index = _index; jint index = _index;
if (index == 0) { if (index == 0) {
return MemRegion(); return MemRegion();
} }
// Otherwise...
jint next_index = index-1; jint next_index = index-1;
jint res = Atomic::cmpxchg(next_index, &_index, index); jint res = Atomic::cmpxchg(next_index, &_index, index);
if (res == index) { if (res == index) {
@ -326,7 +325,11 @@ MemRegion CMRegionStack::pop() {
// Otherwise, we need to try again. // Otherwise, we need to try again.
} }
} }
#endif // 0
#if 0
// The routines that manipulate the region stack with a lock are
// not currently used. They should be retained, however, as a
// diagnostic aid.
void CMRegionStack::push_with_lock(MemRegion mr) { void CMRegionStack::push_with_lock(MemRegion mr) {
assert(mr.word_size() > 0, "Precondition"); assert(mr.word_size() > 0, "Precondition");
@ -361,6 +364,7 @@ MemRegion CMRegionStack::pop_with_lock() {
} }
} }
} }
#endif
bool CMRegionStack::invalidate_entries_into_cset() { bool CMRegionStack::invalidate_entries_into_cset() {
bool result = false; bool result = false;
@ -583,10 +587,13 @@ ConcurrentMark::ConcurrentMark(ReservedSpace rs,
#endif #endif
guarantee(parallel_marking_threads() > 0, "peace of mind"); guarantee(parallel_marking_threads() > 0, "peace of mind");
_parallel_workers = new WorkGang("G1 Parallel Marking Threads", _parallel_workers = new FlexibleWorkGang("G1 Parallel Marking Threads",
(int) parallel_marking_threads(), false, true); (int) _parallel_marking_threads, false, true);
if (_parallel_workers == NULL) if (_parallel_workers == NULL) {
vm_exit_during_initialization("Failed necessary allocation."); vm_exit_during_initialization("Failed necessary allocation.");
} else {
_parallel_workers->initialize_workers();
}
} }
// so that the call below can read a sensible value // so that the call below can read a sensible value
@ -645,8 +652,9 @@ void ConcurrentMark::reset() {
// We do reset all of them, since different phases will use // We do reset all of them, since different phases will use
// different number of active threads. So, it's easiest to have all // different number of active threads. So, it's easiest to have all
// of them ready. // of them ready.
for (int i = 0; i < (int) _max_task_num; ++i) for (int i = 0; i < (int) _max_task_num; ++i) {
_tasks[i]->reset(_nextMarkBitMap); _tasks[i]->reset(_nextMarkBitMap);
}
// we need this to make sure that the flag is on during the evac // we need this to make sure that the flag is on during the evac
// pause with initial mark piggy-backed // pause with initial mark piggy-backed
@ -985,7 +993,7 @@ void ConcurrentMark::grayRegionIfNecessary(MemRegion mr) {
"below the finger, pushing it", "below the finger, pushing it",
mr.start(), mr.end()); mr.start(), mr.end());
if (!region_stack_push(mr)) { if (!region_stack_push_lock_free(mr)) {
if (verbose_low()) if (verbose_low())
gclog_or_tty->print_cr("[global] region stack has overflown."); gclog_or_tty->print_cr("[global] region stack has overflown.");
} }
@ -1451,7 +1459,7 @@ public:
_bm, _g1h->concurrent_mark(), _bm, _g1h->concurrent_mark(),
_region_bm, _card_bm); _region_bm, _card_bm);
calccl.no_yield(); calccl.no_yield();
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
_g1h->heap_region_par_iterate_chunked(&calccl, i, _g1h->heap_region_par_iterate_chunked(&calccl, i,
HeapRegion::FinalCountClaimValue); HeapRegion::FinalCountClaimValue);
} else { } else {
@ -1531,7 +1539,7 @@ public:
G1NoteEndOfConcMarkClosure g1_note_end(_g1h, G1NoteEndOfConcMarkClosure g1_note_end(_g1h,
&_par_cleanup_thread_state[i]->list, &_par_cleanup_thread_state[i]->list,
i); i);
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
_g1h->heap_region_par_iterate_chunked(&g1_note_end, i, _g1h->heap_region_par_iterate_chunked(&g1_note_end, i,
HeapRegion::NoteEndClaimValue); HeapRegion::NoteEndClaimValue);
} else { } else {
@ -1575,7 +1583,7 @@ public:
{} {}
void work(int i) { void work(int i) {
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
_g1rs->scrub_par(_region_bm, _card_bm, i, _g1rs->scrub_par(_region_bm, _card_bm, i,
HeapRegion::ScrubRemSetClaimValue); HeapRegion::ScrubRemSetClaimValue);
} else { } else {
@ -1647,7 +1655,7 @@ void ConcurrentMark::cleanup() {
// Do counting once more with the world stopped for good measure. // Do counting once more with the world stopped for good measure.
G1ParFinalCountTask g1_par_count_task(g1h, nextMarkBitMap(), G1ParFinalCountTask g1_par_count_task(g1h, nextMarkBitMap(),
&_region_bm, &_card_bm); &_region_bm, &_card_bm);
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
assert(g1h->check_heap_region_claim_values( assert(g1h->check_heap_region_claim_values(
HeapRegion::InitialClaimValue), HeapRegion::InitialClaimValue),
"sanity check"); "sanity check");
@ -1695,7 +1703,7 @@ void ConcurrentMark::cleanup() {
// Note end of marking in all heap regions. // Note end of marking in all heap regions.
double note_end_start = os::elapsedTime(); double note_end_start = os::elapsedTime();
G1ParNoteEndTask g1_par_note_end_task(g1h, _par_cleanup_thread_state); G1ParNoteEndTask g1_par_note_end_task(g1h, _par_cleanup_thread_state);
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
int n_workers = g1h->workers()->total_workers(); int n_workers = g1h->workers()->total_workers();
g1h->set_par_threads(n_workers); g1h->set_par_threads(n_workers);
g1h->workers()->run_task(&g1_par_note_end_task); g1h->workers()->run_task(&g1_par_note_end_task);
@ -1720,7 +1728,7 @@ void ConcurrentMark::cleanup() {
if (G1ScrubRemSets) { if (G1ScrubRemSets) {
double rs_scrub_start = os::elapsedTime(); double rs_scrub_start = os::elapsedTime();
G1ParScrubRemSetTask g1_par_scrub_rs_task(g1h, &_region_bm, &_card_bm); G1ParScrubRemSetTask g1_par_scrub_rs_task(g1h, &_region_bm, &_card_bm);
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
int n_workers = g1h->workers()->total_workers(); int n_workers = g1h->workers()->total_workers();
g1h->set_par_threads(n_workers); g1h->set_par_threads(n_workers);
g1h->workers()->run_task(&g1_par_scrub_rs_task); g1h->workers()->run_task(&g1_par_scrub_rs_task);
@ -1934,7 +1942,7 @@ void ConcurrentMark::checkpointRootsFinalWork() {
g1h->ensure_parsability(false); g1h->ensure_parsability(false);
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
G1CollectedHeap::StrongRootsScope srs(g1h); G1CollectedHeap::StrongRootsScope srs(g1h);
// this is remark, so we'll use up all available threads // this is remark, so we'll use up all available threads
int active_workers = ParallelGCThreads; int active_workers = ParallelGCThreads;
@ -2330,6 +2338,39 @@ ConcurrentMark::claim_region(int task_num) {
return NULL; return NULL;
} }
bool ConcurrentMark::invalidate_aborted_regions_in_cset() {
bool result = false;
for (int i = 0; i < (int)_max_task_num; ++i) {
CMTask* the_task = _tasks[i];
MemRegion mr = the_task->aborted_region();
if (mr.start() != NULL) {
assert(mr.end() != NULL, "invariant");
assert(mr.word_size() > 0, "invariant");
HeapRegion* hr = _g1h->heap_region_containing(mr.start());
assert(hr != NULL, "invariant");
if (hr->in_collection_set()) {
// The region points into the collection set
the_task->set_aborted_region(MemRegion());
result = true;
}
}
}
return result;
}
bool ConcurrentMark::has_aborted_regions() {
for (int i = 0; i < (int)_max_task_num; ++i) {
CMTask* the_task = _tasks[i];
MemRegion mr = the_task->aborted_region();
if (mr.start() != NULL) {
assert(mr.end() != NULL, "invariant");
assert(mr.word_size() > 0, "invariant");
return true;
}
}
return false;
}
void ConcurrentMark::oops_do(OopClosure* cl) { void ConcurrentMark::oops_do(OopClosure* cl) {
if (_markStack.size() > 0 && verbose_low()) if (_markStack.size() > 0 && verbose_low())
gclog_or_tty->print_cr("[global] scanning the global marking stack, " gclog_or_tty->print_cr("[global] scanning the global marking stack, "
@ -2348,13 +2389,22 @@ void ConcurrentMark::oops_do(OopClosure* cl) {
queue->oops_do(cl); queue->oops_do(cl);
} }
// finally, invalidate any entries that in the region stack that // Invalidate any entries in the region stack that
// point into the collection set // point into the collection set
if (_regionStack.invalidate_entries_into_cset()) { if (_regionStack.invalidate_entries_into_cset()) {
// otherwise, any gray objects copied during the evacuation pause // otherwise, any gray objects copied during the evacuation pause
// might not be visited. // might not be visited.
assert(_should_gray_objects, "invariant"); assert(_should_gray_objects, "invariant");
} }
// Invalidate any aborted regions, recorded in the individual CM
// tasks, that point into the collection set.
if (invalidate_aborted_regions_in_cset()) {
// otherwise, any gray objects copied during the evacuation pause
// might not be visited.
assert(_should_gray_objects, "invariant");
}
} }
void ConcurrentMark::clear_marking_state() {
@@ -2635,7 +2685,7 @@ void ConcurrentMark::newCSet() {
  // irrespective whether all collection set regions are below the
  // finger, if the region stack is not empty. This is expected to be
  // a rare case, so I don't think it's necessary to be smarted about it.
- if (!region_stack_empty())
+ if (!region_stack_empty() || has_aborted_regions())
    _should_gray_objects = true;
}
@@ -2654,8 +2704,10 @@ void ConcurrentMark::abort() {
  _nextMarkBitMap->clearAll();
  // Empty mark stack
  clear_marking_state();
- for (int i = 0; i < (int)_max_task_num; ++i)
+ for (int i = 0; i < (int)_max_task_num; ++i) {
    _tasks[i]->clear_region_fields();
+   _tasks[i]->clear_aborted_region();
+ }
  _has_aborted = true;
  SATBMarkQueueSet& satb_mq_set = JavaThread::satb_mark_queue_set();
@@ -2933,6 +2985,7 @@ void CMTask::reset(CMBitMap* nextMarkBitMap) {
  _nextMarkBitMap = nextMarkBitMap;
  clear_region_fields();
+ clear_aborted_region();
  _calls = 0;
  _elapsed_time_ms = 0.0;
@@ -3369,14 +3422,14 @@ void CMTask::drain_satb_buffers() {
  CMObjectClosure oc(this);
  SATBMarkQueueSet& satb_mq_set = JavaThread::satb_mark_queue_set();
- if (ParallelGCThreads > 0)
+ if (G1CollectedHeap::use_parallel_gc_threads())
    satb_mq_set.set_par_closure(_task_id, &oc);
  else
    satb_mq_set.set_closure(&oc);
  // This keeps claiming and applying the closure to completed buffers
  // until we run out of buffers or we need to abort.
- if (ParallelGCThreads > 0) {
+ if (G1CollectedHeap::use_parallel_gc_threads()) {
    while (!has_aborted() &&
           satb_mq_set.par_apply_closure_to_completed_buffer(_task_id)) {
      if (_cm->verbose_medium())
@@ -3396,7 +3449,7 @@ void CMTask::drain_satb_buffers() {
  if (!concurrent() && !has_aborted()) {
    // We should only do this during remark.
-   if (ParallelGCThreads > 0)
+   if (G1CollectedHeap::use_parallel_gc_threads())
      satb_mq_set.par_iterate_closure_all_threads(_task_id);
    else
      satb_mq_set.iterate_closure_all_threads();
@@ -3408,7 +3461,7 @@ void CMTask::drain_satb_buffers() {
         concurrent() ||
         satb_mq_set.completed_buffers_num() == 0, "invariant");
- if (ParallelGCThreads > 0)
+ if (G1CollectedHeap::use_parallel_gc_threads())
    satb_mq_set.set_par_closure(_task_id, NULL);
  else
    satb_mq_set.set_closure(NULL);
@@ -3425,20 +3478,32 @@ void CMTask::drain_region_stack(BitMapClosure* bc) {
  assert(_region_finger == NULL,
         "it should be NULL when we're not scanning a region");
- if (!_cm->region_stack_empty()) {
+ if (!_cm->region_stack_empty() || !_aborted_region.is_empty()) {
    if (_cm->verbose_low())
      gclog_or_tty->print_cr("[%d] draining region stack, size = %d",
                             _task_id, _cm->region_stack_size());
-   MemRegion mr = _cm->region_stack_pop_with_lock();
-   // it returns MemRegion() if the pop fails
-   statsOnly(if (mr.start() != NULL) ++_region_stack_pops );
+   MemRegion mr;
+   if (!_aborted_region.is_empty()) {
+     mr = _aborted_region;
+     _aborted_region = MemRegion();
+     if (_cm->verbose_low())
+       gclog_or_tty->print_cr("[%d] scanning aborted region [ " PTR_FORMAT ", " PTR_FORMAT " )",
+                              _task_id, mr.start(), mr.end());
+   } else {
+     mr = _cm->region_stack_pop_lock_free();
+     // it returns MemRegion() if the pop fails
+     statsOnly(if (mr.start() != NULL) ++_region_stack_pops );
+   }
    while (mr.start() != NULL) {
      if (_cm->verbose_medium())
        gclog_or_tty->print_cr("[%d] we are scanning region "
                               "["PTR_FORMAT", "PTR_FORMAT")",
                               _task_id, mr.start(), mr.end());
      assert(mr.end() <= _cm->finger(),
             "otherwise the region shouldn't be on the stack");
      assert(!mr.is_empty(), "Only non-empty regions live on the region stack");
@@ -3451,7 +3516,7 @@ void CMTask::drain_region_stack(BitMapClosure* bc) {
        if (has_aborted())
          mr = MemRegion();
        else {
-         mr = _cm->region_stack_pop_with_lock();
+         mr = _cm->region_stack_pop_lock_free();
          // it returns MemRegion() if the pop fails
          statsOnly(if (mr.start() != NULL) ++_region_stack_pops );
        }
@@ -3465,6 +3530,10 @@ void CMTask::drain_region_stack(BitMapClosure* bc) {
        // have definitely set _region_finger to something non-null.
        assert(_region_finger != NULL, "invariant");
+       // Make sure that any previously aborted region has been
+       // cleared.
+       assert(_aborted_region.is_empty(), "aborted region not cleared");
        // The iteration was actually aborted. So now _region_finger
        // points to the address of the object we last scanned. If we
        // leave it there, when we restart this task, we will rescan
@@ -3477,14 +3546,14 @@ void CMTask::drain_region_stack(BitMapClosure* bc) {
        if (!newRegion.is_empty()) {
          if (_cm->verbose_low()) {
-           gclog_or_tty->print_cr("[%d] pushing unscanned region"
-                                  "[" PTR_FORMAT "," PTR_FORMAT ") on region stack",
+           gclog_or_tty->print_cr("[%d] recording unscanned region"
+                                  "[" PTR_FORMAT "," PTR_FORMAT ") in CMTask",
                                   _task_id,
                                   newRegion.start(), newRegion.end());
          }
-         // Now push the part of the region we didn't scan on the
-         // region stack to make sure a task scans it later.
-         _cm->region_stack_push_with_lock(newRegion);
+         // Now record the part of the region we didn't scan to
+         // make sure this task scans it later.
+         _aborted_region = newRegion;
        }
        // break from while
        mr = MemRegion();
@@ -3654,6 +3723,8 @@ void CMTask::do_marking_step(double time_target_ms) {
  assert(concurrent() || _cm->region_stack_empty(),
         "the region stack should have been cleared before remark");
+ assert(concurrent() || !_cm->has_aborted_regions(),
+        "aborted regions should have been cleared before remark");
  assert(_region_finger == NULL,
         "this should be non-null only when a region is being scanned");
@@ -3943,6 +4014,7 @@ void CMTask::do_marking_step(double time_target_ms) {
      // that, if a condition is false, we can immediately find out
      // which one.
      guarantee(_cm->out_of_regions(), "only way to reach here");
+     guarantee(_aborted_region.is_empty(), "only way to reach here");
      guarantee(_cm->region_stack_empty(), "only way to reach here");
      guarantee(_cm->mark_stack_empty(), "only way to reach here");
      guarantee(_task_queue->size() == 0, "only way to reach here");
@@ -4042,7 +4114,8 @@ CMTask::CMTask(int task_id,
    _nextMarkBitMap(NULL), _hash_seed(17),
    _task_queue(task_queue),
    _task_queues(task_queues),
-   _oop_closure(NULL) {
+   _oop_closure(NULL),
+   _aborted_region(MemRegion()) {
  guarantee(task_queue != NULL, "invariant");
  guarantee(task_queues != NULL, "invariant");
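Taken together, these hunks stop pushing the unscanned remainder of an aborted region back onto the shared region stack and instead remember it in the task that aborted. A stand-alone sketch of that pattern, with simplified names (an illustration of the idea, not the HotSpot code):

    // Illustrative only: per-worker "aborted region" bookkeeping. Each worker
    // keeps the remainder of an interrupted scan locally instead of pushing it
    // back onto a stack that other workers may be popping concurrently.
    struct Region {
      const char* start = nullptr;
      const char* end   = nullptr;
      bool is_empty() const { return start == end; }
    };

    class Worker {
      Region _aborted_region;   // remainder of an interrupted scan, private to this worker
    public:
      // Called when a scan has to stop early: remember the unscanned part locally
      // rather than pushing it back onto the shared (lock-free) region stack.
      void record_aborted(Region remainder) { _aborted_region = remainder; }

      // Called when the worker looks for more work: drain the private remainder
      // before taking anything new from the shared stack.
      bool take_private_work(Region& out) {
        if (_aborted_region.is_empty()) return false;
        out = _aborted_region;
        _aborted_region = Region();
        return true;
      }
    };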


@@ -250,21 +250,23 @@ public:
  // This is lock-free; assumes that it will only be called in parallel
  // with other "push" operations (no pops).
- void push(MemRegion mr);
+ void push_lock_free(MemRegion mr);
-#if 0
- // This is currently not used. See the comment in the .cpp file.
  // Lock-free; assumes that it will only be called in parallel
  // with other "pop" operations (no pushes).
- MemRegion pop();
-#endif // 0
+ MemRegion pop_lock_free();
+#if 0
+ // The routines that manipulate the region stack with a lock are
+ // not currently used. They should be retained, however, as a
+ // diagnostic aid.
  // These two are the implementations that use a lock. They can be
  // called concurrently with each other but they should not be called
  // concurrently with the lock-free versions (push() / pop()).
  void push_with_lock(MemRegion mr);
  MemRegion pop_with_lock();
+#endif
  bool isEmpty() { return _index == 0; }
  bool isFull()  { return _index == _capacity; }
@ -398,6 +400,7 @@ protected:
volatile bool _concurrent; volatile bool _concurrent;
// set at the end of a Full GC so that marking aborts // set at the end of a Full GC so that marking aborts
volatile bool _has_aborted; volatile bool _has_aborted;
// used when remark aborts due to an overflow to indicate that // used when remark aborts due to an overflow to indicate that
// another concurrent marking phase should start // another concurrent marking phase should start
volatile bool _restart_for_overflow; volatile bool _restart_for_overflow;
@@ -548,23 +551,30 @@ public:
  bool mark_stack_overflow() { return _markStack.overflow(); }
  bool mark_stack_empty()    { return _markStack.isEmpty(); }
- // Manipulation of the region stack
- bool region_stack_push(MemRegion mr) {
+ // (Lock-free) Manipulation of the region stack
+ bool region_stack_push_lock_free(MemRegion mr) {
    // Currently we only call the lock-free version during evacuation
    // pauses.
    assert(SafepointSynchronize::is_at_safepoint(), "world should be stopped");
-   _regionStack.push(mr);
+   _regionStack.push_lock_free(mr);
    if (_regionStack.overflow()) {
      set_has_overflown();
      return false;
    }
    return true;
  }
// Lock-free version of region-stack pop. Should only be
// called in tandem with other lock-free pops.
MemRegion region_stack_pop_lock_free() {
return _regionStack.pop_lock_free();
}
#if 0
- // Currently this is not used. See the comment in the .cpp file.
- MemRegion region_stack_pop() { return _regionStack.pop(); }
-#endif // 0
+ // The routines that manipulate the region stack with a lock are
+ // not currently used. They should be retained, however, as a
+ // diagnostic aid.
  bool region_stack_push_with_lock(MemRegion mr) {
// Currently we only call the lock-based version during either // Currently we only call the lock-based version during either
@ -579,6 +589,7 @@ public:
} }
return true; return true;
} }
MemRegion region_stack_pop_with_lock() { MemRegion region_stack_pop_with_lock() {
// Currently we only call the lock-based version during either // Currently we only call the lock-based version during either
// concurrent marking or remark. // concurrent marking or remark.
@ -587,11 +598,21 @@ public:
return _regionStack.pop_with_lock(); return _regionStack.pop_with_lock();
} }
#endif
int region_stack_size() { return _regionStack.size(); } int region_stack_size() { return _regionStack.size(); }
bool region_stack_overflow() { return _regionStack.overflow(); } bool region_stack_overflow() { return _regionStack.overflow(); }
bool region_stack_empty() { return _regionStack.isEmpty(); } bool region_stack_empty() { return _regionStack.isEmpty(); }
// Iterate over any regions that were aborted while draining the
// region stack (any such regions are saved in the corresponding
// CMTask) and invalidate (i.e. assign to the empty MemRegion())
// any regions that point into the collection set.
bool invalidate_aborted_regions_in_cset();
// Returns true if there are any aborted memory regions.
bool has_aborted_regions();
bool concurrent_marking_in_progress() { bool concurrent_marking_in_progress() {
return _concurrent_marking_in_progress; return _concurrent_marking_in_progress;
} }
@ -856,6 +877,15 @@ private:
// stack. // stack.
HeapWord* _region_finger; HeapWord* _region_finger;
// If we abort while scanning a region we record the remaining
// unscanned portion and check this field when marking restarts.
// This avoids having to push on the region stack while other
// marking threads may still be popping regions.
// If we were to push the unscanned portion directly to the
// region stack then we would need to using locking versions
// of the push and pop operations.
MemRegion _aborted_region;
// the number of words this task has scanned // the number of words this task has scanned
size_t _words_scanned; size_t _words_scanned;
// When _words_scanned reaches this limit, the regular clock is // When _words_scanned reaches this limit, the regular clock is
@ -1012,6 +1042,15 @@ public:
void clear_has_aborted() { _has_aborted = false; } void clear_has_aborted() { _has_aborted = false; }
bool claimed() { return _claimed; } bool claimed() { return _claimed; }
// Support routines for the partially scanned region that may be
// recorded as a result of aborting while draining the CMRegionStack
MemRegion aborted_region() { return _aborted_region; }
void set_aborted_region(MemRegion mr)
{ _aborted_region = mr; }
// Clears any recorded partially scanned region
void clear_aborted_region() { set_aborted_region(MemRegion()); }
void set_oop_closure(OopClosure* oop_closure) { void set_oop_closure(OopClosure* oop_closure) {
_oop_closure = oop_closure; _oop_closure = oop_closure;
} }
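push_lock_free() / pop_lock_free() are only safe when all concurrent callers perform the same kind of operation; the usual way to get a push that tolerates concurrent pushes is a CAS loop on the index. A sketch using std::atomic (the field names echo _index/_capacity above, but this is not the VM implementation, which uses its own Atomic class):

    #include <atomic>
    #include <cstddef>

    template <typename T>
    class BoundedLockFreePushStack {
      T*                  _base;            // preallocated storage
      std::atomic<size_t> _index{0};        // next free slot
      size_t              _capacity;
      bool                _overflow = false;
    public:
      BoundedLockFreePushStack(T* storage, size_t cap)
        : _base(storage), _capacity(cap) {}

      // Safe against concurrent pushes (no concurrent pops): claim a slot by
      // advancing _index with compare-exchange, then write into the claimed slot.
      void push_lock_free(const T& value) {
        size_t index = _index.load(std::memory_order_relaxed);
        while (true) {
          if (index >= _capacity) { _overflow = true; return; }   // full: record overflow
          if (_index.compare_exchange_weak(index, index + 1,
                                           std::memory_order_acq_rel)) {
            _base[index] = value;   // we own slot 'index'
            return;
          }
          // compare_exchange_weak reloaded 'index'; retry
        }
      }
      bool overflow() const { return _overflow; }
    };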


@@ -303,9 +303,10 @@ void ConcurrentMarkThread::print_on(outputStream* st) const {
}
void ConcurrentMarkThread::sleepBeforeNextCycle() {
- clear_in_progress();
  // We join here because we don't want to do the "shouldConcurrentMark()"
  // below while the world is otherwise stopped.
+ assert(!in_progress(), "should have been cleared");
  MutexLockerEx x(CGC_lock, Mutex::_no_safepoint_check_flag);
  while (!started()) {
    CGC_lock->wait(Mutex::_no_safepoint_check_flag);


@@ -69,12 +69,12 @@ class ConcurrentMarkThread: public ConcurrentGCThread {
  ConcurrentMark* cm()     { return _cm; }
- void set_started()       { _started = true;  }
- void clear_started()     { _started = false; }
+ void set_started()       { assert(!_in_progress, "cycle in progress"); _started = true;  }
+ void clear_started()     { assert(_in_progress, "must be starting a cycle"); _started = false; }
  bool started()           { return _started; }
- void set_in_progress()   { _in_progress = true;  }
- void clear_in_progress() { _in_progress = false; }
+ void set_in_progress()   { assert(_started, "must be starting a cycle"); _in_progress = true;  }
+ void clear_in_progress() { assert(!_started, "must not be starting a new cycle"); _in_progress = false; }
  bool in_progress()       { return _in_progress; }
  // This flag returns true from the moment a marking cycle is
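The new asserts pin down a strict ordering between the two flags: set_started(), then set_in_progress(), then clear_started(), and only once the cycle is fully done clear_in_progress(). A small stand-alone illustration of that handshake (plain C++ with assumed names, not the VM class):

    #include <cassert>

    class CycleState {
      bool _started     = false;   // a marking cycle has been requested
      bool _in_progress = false;   // the cycle is actually running
    public:
      void set_started()       { assert(!_in_progress); _started = true;  }
      void set_in_progress()   { assert(_started);      _in_progress = true; }
      void clear_started()     { assert(_in_progress);  _started = false; }    // the running cycle now owns the request
      void clear_in_progress() { assert(!_started);     _in_progress = false; } // nothing new may be queued yet
    };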


@@ -37,11 +37,10 @@ public:
class DirtyCardQueue: public PtrQueue {
public:
  DirtyCardQueue(PtrQueueSet* qset_, bool perm = false) :
-   PtrQueue(qset_, perm)
- {
-   // Dirty card queues are always active.
-   _active = true;
- }
+   // Dirty card queues are always active, so we create them with their
+   // active field set to true.
+   PtrQueue(qset_, perm, true /* active */) { }
  // Apply the closure to all elements, and reset the index to make the
  // buffer empty. If a closure application returns "false", return
  // "false" immediately, halting the iteration. If "consume" is true,


@@ -961,7 +961,8 @@ void G1CollectedHeap::do_collection(bool explicit_gc,
    }
    // Rebuild remembered sets of all regions.
-   if (ParallelGCThreads > 0) {
+   if (G1CollectedHeap::use_parallel_gc_threads()) {
      ParRebuildRSTask rebuild_rs_task(this);
      assert(check_heap_region_claim_values(
             HeapRegion::InitialClaimValue), "sanity check");
@ -1784,6 +1785,14 @@ void G1CollectedHeap::increment_full_collections_completed(bool outer) {
_full_collections_completed += 1; _full_collections_completed += 1;
// We need to clear the "in_progress" flag in the CM thread before
// we wake up any waiters (especially when ExplicitInvokesConcurrent
// is set) so that if a waiter requests another System.gc() it doesn't
// incorrectly see that a marking cyle is still in progress.
if (outer) {
_cmThread->clear_in_progress();
}
// This notify_all() will ensure that a thread that called // This notify_all() will ensure that a thread that called
// System.gc() with (with ExplicitGCInvokesConcurrent set or not) // System.gc() with (with ExplicitGCInvokesConcurrent set or not)
// and it's waiting for a full GC to finish will be woken up. It is // and it's waiting for a full GC to finish will be woken up. It is
@ -1960,7 +1969,7 @@ G1CollectedHeap::heap_region_par_iterate_chunked(HeapRegionClosure* cl,
int worker, int worker,
jint claim_value) { jint claim_value) {
const size_t regions = n_regions(); const size_t regions = n_regions();
const size_t worker_num = (ParallelGCThreads > 0 ? ParallelGCThreads : 1); const size_t worker_num = (G1CollectedHeap::use_parallel_gc_threads() ? ParallelGCThreads : 1);
// try to spread out the starting points of the workers // try to spread out the starting points of the workers
const size_t start_index = regions / worker_num * (size_t) worker; const size_t start_index = regions / worker_num * (size_t) worker;
@ -2527,7 +2536,7 @@ void G1CollectedHeap::print_on_extended(outputStream* st) const {
} }
void G1CollectedHeap::print_gc_threads_on(outputStream* st) const { void G1CollectedHeap::print_gc_threads_on(outputStream* st) const {
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
workers()->print_worker_threads_on(st); workers()->print_worker_threads_on(st);
} }
@ -2543,7 +2552,7 @@ void G1CollectedHeap::print_gc_threads_on(outputStream* st) const {
} }
void G1CollectedHeap::gc_threads_do(ThreadClosure* tc) const { void G1CollectedHeap::gc_threads_do(ThreadClosure* tc) const {
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
workers()->threads_do(tc); workers()->threads_do(tc);
} }
tc->do_thread(_cmThread); tc->do_thread(_cmThread);
@ -3083,7 +3092,7 @@ void G1CollectedHeap::set_gc_alloc_region(int purpose, HeapRegion* r) {
if (r != NULL) { if (r != NULL) {
r_used = r->used(); r_used = r->used();
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
// need to take the lock to guard against two threads calling // need to take the lock to guard against two threads calling
// get_gc_alloc_region concurrently (very unlikely but...) // get_gc_alloc_region concurrently (very unlikely but...)
MutexLockerEx x(ParGCRareEvent_lock, Mutex::_no_safepoint_check_flag); MutexLockerEx x(ParGCRareEvent_lock, Mutex::_no_safepoint_check_flag);
@ -4182,6 +4191,8 @@ public:
// *** Common G1 Evacuation Stuff // *** Common G1 Evacuation Stuff
// This method is run in a GC worker.
void void
G1CollectedHeap:: G1CollectedHeap::
g1_process_strong_roots(bool collecting_perm_gen, g1_process_strong_roots(bool collecting_perm_gen,
@ -4259,7 +4270,7 @@ public:
}; };
void G1CollectedHeap::save_marks() { void G1CollectedHeap::save_marks() {
if (ParallelGCThreads == 0) { if (!CollectedHeap::use_parallel_gc_threads()) {
SaveMarksClosure sm; SaveMarksClosure sm;
heap_region_iterate(&sm); heap_region_iterate(&sm);
} }
@ -4284,7 +4295,7 @@ void G1CollectedHeap::evacuate_collection_set() {
assert(dirty_card_queue_set().completed_buffers_num() == 0, "Should be empty"); assert(dirty_card_queue_set().completed_buffers_num() == 0, "Should be empty");
double start_par = os::elapsedTime(); double start_par = os::elapsedTime();
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
// The individual threads will set their evac-failure closures. // The individual threads will set their evac-failure closures.
StrongRootsScope srs(this); StrongRootsScope srs(this);
if (ParallelGCVerbose) G1ParScanThreadState::print_termination_stats_hdr(); if (ParallelGCVerbose) G1ParScanThreadState::print_termination_stats_hdr();


@ -656,6 +656,9 @@ protected:
bool _unclean_regions_coming; bool _unclean_regions_coming;
public: public:
SubTasksDone* process_strong_tasks() { return _process_strong_tasks; }
void set_refine_cte_cl_concurrency(bool concurrent); void set_refine_cte_cl_concurrency(bool concurrent);
RefToScanQueue *task_queue(int i) const; RefToScanQueue *task_queue(int i) const;
@ -684,7 +687,7 @@ public:
void set_par_threads(int t) { void set_par_threads(int t) {
SharedHeap::set_par_threads(t); SharedHeap::set_par_threads(t);
_process_strong_tasks->set_par_threads(t); _process_strong_tasks->set_n_threads(t);
} }
virtual CollectedHeap::Name kind() const { virtual CollectedHeap::Name kind() const {
@ -1688,8 +1691,8 @@ public:
ref = new_ref; ref = new_ref;
} }
int refs_to_scan() { return refs()->size(); } int refs_to_scan() { return (int)refs()->size(); }
int overflowed_refs_to_scan() { return refs()->overflow_stack()->length(); } int overflowed_refs_to_scan() { return (int)refs()->overflow_stack()->size(); }
template <class T> void update_rs(HeapRegion* from, T* p, int tid) { template <class T> void update_rs(HeapRegion* from, T* p, int tid) {
if (G1DeferredRSUpdate) { if (G1DeferredRSUpdate) {


@ -72,7 +72,10 @@ static double non_young_other_cost_per_region_ms_defaults[] = {
// </NEW PREDICTION> // </NEW PREDICTION>
G1CollectorPolicy::G1CollectorPolicy() : G1CollectorPolicy::G1CollectorPolicy() :
_parallel_gc_threads((ParallelGCThreads > 0) ? ParallelGCThreads : 1), _parallel_gc_threads(G1CollectedHeap::use_parallel_gc_threads()
? ParallelGCThreads : 1),
_n_pauses(0), _n_pauses(0),
_recent_CH_strong_roots_times_ms(new TruncatedSeq(NumPrevPausesForHeuristics)), _recent_CH_strong_roots_times_ms(new TruncatedSeq(NumPrevPausesForHeuristics)),
_recent_G1_strong_roots_times_ms(new TruncatedSeq(NumPrevPausesForHeuristics)), _recent_G1_strong_roots_times_ms(new TruncatedSeq(NumPrevPausesForHeuristics)),
@ -1073,7 +1076,7 @@ void G1CollectorPolicy::print_stats (int level,
} }
double G1CollectorPolicy::avg_value (double* data) { double G1CollectorPolicy::avg_value (double* data) {
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
double ret = 0.0; double ret = 0.0;
for (uint i = 0; i < ParallelGCThreads; ++i) for (uint i = 0; i < ParallelGCThreads; ++i)
ret += data[i]; ret += data[i];
@ -1084,7 +1087,7 @@ double G1CollectorPolicy::avg_value (double* data) {
} }
double G1CollectorPolicy::max_value (double* data) { double G1CollectorPolicy::max_value (double* data) {
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
double ret = data[0]; double ret = data[0];
for (uint i = 1; i < ParallelGCThreads; ++i) for (uint i = 1; i < ParallelGCThreads; ++i)
if (data[i] > ret) if (data[i] > ret)
@ -1096,7 +1099,7 @@ double G1CollectorPolicy::max_value (double* data) {
} }
double G1CollectorPolicy::sum_of_values (double* data) { double G1CollectorPolicy::sum_of_values (double* data) {
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
double sum = 0.0; double sum = 0.0;
for (uint i = 0; i < ParallelGCThreads; i++) for (uint i = 0; i < ParallelGCThreads; i++)
sum += data[i]; sum += data[i];
@ -1110,7 +1113,7 @@ double G1CollectorPolicy::max_sum (double* data1,
double* data2) { double* data2) {
double ret = data1[0] + data2[0]; double ret = data1[0] + data2[0];
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
for (uint i = 1; i < ParallelGCThreads; ++i) { for (uint i = 1; i < ParallelGCThreads; ++i) {
double data = data1[i] + data2[i]; double data = data1[i] + data2[i];
if (data > ret) if (data > ret)
@ -1126,7 +1129,7 @@ double G1CollectorPolicy::max_sum (double* data1,
void G1CollectorPolicy::record_collection_pause_end() { void G1CollectorPolicy::record_collection_pause_end() {
double end_time_sec = os::elapsedTime(); double end_time_sec = os::elapsedTime();
double elapsed_ms = _last_pause_time_ms; double elapsed_ms = _last_pause_time_ms;
bool parallel = ParallelGCThreads > 0; bool parallel = G1CollectedHeap::use_parallel_gc_threads();
double evac_ms = (end_time_sec - _cur_G1_strong_roots_end_sec) * 1000.0; double evac_ms = (end_time_sec - _cur_G1_strong_roots_end_sec) * 1000.0;
size_t rs_size = size_t rs_size =
_cur_collection_pause_used_regions_at_start - collection_set_size(); _cur_collection_pause_used_regions_at_start - collection_set_size();
@ -1941,7 +1944,7 @@ G1CollectorPolicy::recent_avg_survival_fraction_work(TruncatedSeq* surviving,
// Further, we're now always doing parallel collection. But I'm still // Further, we're now always doing parallel collection. But I'm still
// leaving this here as a placeholder for a more precise assertion later. // leaving this here as a placeholder for a more precise assertion later.
// (DLD, 10/05.) // (DLD, 10/05.)
assert((true || ParallelGCThreads > 0) || assert((true || G1CollectedHeap::use_parallel_gc_threads()) ||
_g1->evacuation_failed() || _g1->evacuation_failed() ||
recent_survival_rate <= 1.0, "Or bad frac"); recent_survival_rate <= 1.0, "Or bad frac");
return recent_survival_rate; return recent_survival_rate;
@ -1961,7 +1964,7 @@ G1CollectorPolicy::last_survival_fraction_work(TruncatedSeq* surviving,
// Further, we're now always doing parallel collection. But I'm still // Further, we're now always doing parallel collection. But I'm still
// leaving this here as a placeholder for a more precise assertion later. // leaving this here as a placeholder for a more precise assertion later.
// (DLD, 10/05.) // (DLD, 10/05.)
assert((true || ParallelGCThreads > 0) || assert((true || G1CollectedHeap::use_parallel_gc_threads()) ||
last_survival_rate <= 1.0, "Or bad frac"); last_survival_rate <= 1.0, "Or bad frac");
return last_survival_rate; return last_survival_rate;
} else { } else {
@ -2121,7 +2124,7 @@ void G1CollectorPolicy::check_other_times(int level,
} }
void G1CollectorPolicy::print_summary(PauseSummary* summary) const { void G1CollectorPolicy::print_summary(PauseSummary* summary) const {
bool parallel = ParallelGCThreads > 0; bool parallel = G1CollectedHeap::use_parallel_gc_threads();
MainBodySummary* body_summary = summary->main_body_summary(); MainBodySummary* body_summary = summary->main_body_summary();
if (summary->get_total_seq()->num() > 0) { if (summary->get_total_seq()->num() > 0) {
print_summary_sd(0, "Evacuation Pauses", summary->get_total_seq()); print_summary_sd(0, "Evacuation Pauses", summary->get_total_seq());
@ -2559,7 +2562,7 @@ record_concurrent_mark_cleanup_end(size_t freed_bytes,
gclog_or_tty->print_cr(" clear marked regions + work1: %8.3f ms.", gclog_or_tty->print_cr(" clear marked regions + work1: %8.3f ms.",
(clear_marked_end - start)*1000.0); (clear_marked_end - start)*1000.0);
} }
if (ParallelGCThreads > 0) { if (G1CollectedHeap::use_parallel_gc_threads()) {
const size_t OverpartitionFactor = 4; const size_t OverpartitionFactor = 4;
const size_t MinWorkUnit = 8; const size_t MinWorkUnit = 8;
const size_t WorkUnit = const size_t WorkUnit =


@ -101,22 +101,6 @@ void G1MarkSweep::allocate_stacks() {
GenMarkSweep::_preserved_count_max = 0; GenMarkSweep::_preserved_count_max = 0;
GenMarkSweep::_preserved_marks = NULL; GenMarkSweep::_preserved_marks = NULL;
GenMarkSweep::_preserved_count = 0; GenMarkSweep::_preserved_count = 0;
GenMarkSweep::_preserved_mark_stack = NULL;
GenMarkSweep::_preserved_oop_stack = NULL;
GenMarkSweep::_marking_stack =
new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
GenMarkSweep::_objarray_stack =
new (ResourceObj::C_HEAP) GrowableArray<ObjArrayTask>(50, true);
int size = SystemDictionary::number_of_classes() * 2;
GenMarkSweep::_revisit_klass_stack =
new (ResourceObj::C_HEAP) GrowableArray<Klass*>(size, true);
// (#klass/k)^2 for k ~ 10 appears a better fit, but this will have to do
// for now until we have a chance to work out a more optimal setting.
GenMarkSweep::_revisit_mdo_stack =
new (ResourceObj::C_HEAP) GrowableArray<DataLayout*>(size*2, true);
} }
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading, void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
@ -145,7 +129,7 @@ void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
// Follow system dictionary roots and unload classes // Follow system dictionary roots and unload classes
bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive); bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
assert(GenMarkSweep::_marking_stack->is_empty(), assert(GenMarkSweep::_marking_stack.is_empty(),
"stack should be empty by now"); "stack should be empty by now");
// Follow code cache roots (has to be done after system dictionary, // Follow code cache roots (has to be done after system dictionary,
@ -157,19 +141,19 @@ void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
// Update subklass/sibling/implementor links of live klasses // Update subklass/sibling/implementor links of live klasses
GenMarkSweep::follow_weak_klass_links(); GenMarkSweep::follow_weak_klass_links();
assert(GenMarkSweep::_marking_stack->is_empty(), assert(GenMarkSweep::_marking_stack.is_empty(),
"stack should be empty by now"); "stack should be empty by now");
// Visit memoized MDO's and clear any unmarked weak refs // Visit memoized MDO's and clear any unmarked weak refs
GenMarkSweep::follow_mdo_weak_refs(); GenMarkSweep::follow_mdo_weak_refs();
assert(GenMarkSweep::_marking_stack->is_empty(), "just drained"); assert(GenMarkSweep::_marking_stack.is_empty(), "just drained");
// Visit symbol and interned string tables and delete unmarked oops // Visit symbol and interned string tables and delete unmarked oops
SymbolTable::unlink(&GenMarkSweep::is_alive); SymbolTable::unlink(&GenMarkSweep::is_alive);
StringTable::unlink(&GenMarkSweep::is_alive); StringTable::unlink(&GenMarkSweep::is_alive);
assert(GenMarkSweep::_marking_stack->is_empty(), assert(GenMarkSweep::_marking_stack.is_empty(),
"stack should be empty by now"); "stack should be empty by now");
} }


@@ -523,7 +523,7 @@ prepare_for_oops_into_collection_set_do() {
  assert(!_traversal_in_progress, "Invariant between iterations.");
  set_traversal(true);
  if (ParallelGCThreads > 0) {
-   _seq_task->set_par_threads((int)n_workers());
+   _seq_task->set_n_threads((int)n_workers());
  }
  guarantee( _cards_scanned == NULL, "invariant" );
  _cards_scanned = NEW_C_HEAP_ARRAY(size_t, n_workers());


@ -89,6 +89,10 @@ public:
return _buf == NULL ? 0 : _sz - _index; return _buf == NULL ? 0 : _sz - _index;
} }
bool is_empty() {
return _buf == NULL || _sz == _index;
}
// Set the "active" property of the queue to "b". An enqueue to an // Set the "active" property of the queue to "b". An enqueue to an
// inactive thread is a no-op. Setting a queue to inactive resets its // inactive thread is a no-op. Setting a queue to inactive resets its
// log to the empty state. // log to the empty state.


@@ -29,7 +29,12 @@ class JavaThread;
class ObjPtrQueue: public PtrQueue {
public:
  ObjPtrQueue(PtrQueueSet* qset_, bool perm = false) :
-   PtrQueue(qset_, perm, qset_->is_active()) { }
+   // SATB queues are only active during marking cycles. We create
+   // them with their active field set to false. If a thread is
+   // created during a cycle and its SATB queue needs to be activated
+   // before the thread starts running, we'll need to set its active
+   // field to true. This is done in JavaThread::initialize_queues().
+   PtrQueue(qset_, perm, false /* active */) { }
  // Apply the closure to all elements, and reset the index to make the
  // buffer empty.
  void apply_closure(ObjectClosure* cl);
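JavaThread::initialize_queues() is only named in the comment above; its body is not part of this diff. A sketch of the activation idea it describes, with illustrative names (the only things taken from the diff are the notion of an active flag and that a thread created mid-cycle must inherit it from the shared queue set):

    // Illustration only; these are not the HotSpot classes.
    struct QueueSet  { bool active = false; };            // toggled at marking start/end
    struct SatbQueue {
      bool active = false;                                 // queues are created inactive
      void enqueue(void* ref) {
        if (!active) return;                               // enqueue is a no-op when inactive
        /* ... buffer the reference ... */
      }
    };

    // What an initialize_queues()-style hook has to do for a thread created
    // while marking is already running: copy the set's state into the new queue.
    inline void initialize_thread_queue(SatbQueue& q, const QueueSet& set) {
      q.active = set.active;
    }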


@ -171,6 +171,7 @@ concurrentMarkSweepGeneration.hpp generation.hpp
concurrentMarkSweepGeneration.hpp generationCounters.hpp concurrentMarkSweepGeneration.hpp generationCounters.hpp
concurrentMarkSweepGeneration.hpp memoryService.hpp concurrentMarkSweepGeneration.hpp memoryService.hpp
concurrentMarkSweepGeneration.hpp mutexLocker.hpp concurrentMarkSweepGeneration.hpp mutexLocker.hpp
concurrentMarkSweepGeneration.hpp stack.inline.hpp
concurrentMarkSweepGeneration.hpp taskqueue.hpp concurrentMarkSweepGeneration.hpp taskqueue.hpp
concurrentMarkSweepGeneration.hpp virtualspace.hpp concurrentMarkSweepGeneration.hpp virtualspace.hpp
concurrentMarkSweepGeneration.hpp yieldingWorkgroup.hpp concurrentMarkSweepGeneration.hpp yieldingWorkgroup.hpp


@ -187,9 +187,11 @@ psCompactionManager.cpp parMarkBitMap.hpp
psCompactionManager.cpp psParallelCompact.hpp psCompactionManager.cpp psParallelCompact.hpp
psCompactionManager.cpp psCompactionManager.hpp psCompactionManager.cpp psCompactionManager.hpp
psCompactionManager.cpp psOldGen.hpp psCompactionManager.cpp psOldGen.hpp
psCompactionManager.cpp stack.inline.hpp
psCompactionManager.cpp systemDictionary.hpp psCompactionManager.cpp systemDictionary.hpp
psCompactionManager.hpp allocation.hpp psCompactionManager.hpp allocation.hpp
psCompactionManager.hpp stack.hpp
psCompactionManager.hpp taskqueue.hpp psCompactionManager.hpp taskqueue.hpp
psCompactionManager.inline.hpp psCompactionManager.hpp psCompactionManager.inline.hpp psCompactionManager.hpp
@ -233,12 +235,14 @@ psMarkSweep.cpp referencePolicy.hpp
psMarkSweep.cpp referenceProcessor.hpp psMarkSweep.cpp referenceProcessor.hpp
psMarkSweep.cpp safepoint.hpp psMarkSweep.cpp safepoint.hpp
psMarkSweep.cpp spaceDecorator.hpp psMarkSweep.cpp spaceDecorator.hpp
psMarkSweep.cpp stack.inline.hpp
psMarkSweep.cpp symbolTable.hpp psMarkSweep.cpp symbolTable.hpp
psMarkSweep.cpp systemDictionary.hpp psMarkSweep.cpp systemDictionary.hpp
psMarkSweep.cpp vmThread.hpp psMarkSweep.cpp vmThread.hpp
psMarkSweep.hpp markSweep.inline.hpp psMarkSweep.hpp markSweep.inline.hpp
psMarkSweep.hpp collectorCounters.hpp psMarkSweep.hpp collectorCounters.hpp
psMarkSweep.hpp stack.hpp
psMarkSweepDecorator.cpp liveRange.hpp psMarkSweepDecorator.cpp liveRange.hpp
psMarkSweepDecorator.cpp markSweep.inline.hpp psMarkSweepDecorator.cpp markSweep.inline.hpp
@ -280,6 +284,7 @@ psParallelCompact.cpp psYoungGen.hpp
psParallelCompact.cpp referencePolicy.hpp psParallelCompact.cpp referencePolicy.hpp
psParallelCompact.cpp referenceProcessor.hpp psParallelCompact.cpp referenceProcessor.hpp
psParallelCompact.cpp safepoint.hpp psParallelCompact.cpp safepoint.hpp
psParallelCompact.cpp stack.inline.hpp
psParallelCompact.cpp symbolTable.hpp psParallelCompact.cpp symbolTable.hpp
psParallelCompact.cpp systemDictionary.hpp psParallelCompact.cpp systemDictionary.hpp
psParallelCompact.cpp vmThread.hpp psParallelCompact.cpp vmThread.hpp
@ -367,6 +372,7 @@ psScavenge.cpp referencePolicy.hpp
psScavenge.cpp referenceProcessor.hpp psScavenge.cpp referenceProcessor.hpp
psScavenge.cpp resourceArea.hpp psScavenge.cpp resourceArea.hpp
psScavenge.cpp spaceDecorator.hpp psScavenge.cpp spaceDecorator.hpp
psScavenge.cpp stack.inline.hpp
psScavenge.cpp threadCritical.hpp psScavenge.cpp threadCritical.hpp
psScavenge.cpp vmThread.hpp psScavenge.cpp vmThread.hpp
psScavenge.cpp vm_operations.hpp psScavenge.cpp vm_operations.hpp
@ -376,6 +382,7 @@ psScavenge.hpp cardTableExtension.hpp
psScavenge.hpp collectorCounters.hpp psScavenge.hpp collectorCounters.hpp
psScavenge.hpp oop.hpp psScavenge.hpp oop.hpp
psScavenge.hpp psVirtualspace.hpp psScavenge.hpp psVirtualspace.hpp
psScavenge.hpp stack.hpp
psScavenge.inline.hpp cardTableExtension.hpp psScavenge.inline.hpp cardTableExtension.hpp
psScavenge.inline.hpp parallelScavengeHeap.hpp psScavenge.inline.hpp parallelScavengeHeap.hpp


@ -93,11 +93,13 @@ markSweep.cpp oop.inline.hpp
markSweep.hpp growableArray.hpp markSweep.hpp growableArray.hpp
markSweep.hpp markOop.hpp markSweep.hpp markOop.hpp
markSweep.hpp oop.hpp markSweep.hpp oop.hpp
markSweep.hpp stack.hpp
markSweep.hpp timer.hpp markSweep.hpp timer.hpp
markSweep.hpp universe.hpp markSweep.hpp universe.hpp
markSweep.inline.hpp collectedHeap.hpp markSweep.inline.hpp collectedHeap.hpp
markSweep.inline.hpp markSweep.hpp markSweep.inline.hpp markSweep.hpp
markSweep.inline.hpp stack.inline.hpp
mutableSpace.hpp immutableSpace.hpp mutableSpace.hpp immutableSpace.hpp
mutableSpace.hpp memRegion.hpp mutableSpace.hpp memRegion.hpp


@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2007, 2010 Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
@@ -44,7 +44,7 @@ void CardTableModRefBS::par_non_clean_card_iterate_work(Space* sp, MemRegion mr,
  int n_strides = n_threads * StridesPerThread;
  SequentialSubTasksDone* pst = sp->par_seq_tasks();
- pst->set_par_threads(n_threads);
+ pst->set_n_threads(n_threads);
  pst->set_n_tasks(n_strides);
  int stride = 0;


@@ -34,12 +34,12 @@ ParScanThreadState::ParScanThreadState(Space* to_space_,
                                       Generation* old_gen_,
                                       int thread_num_,
                                       ObjToScanQueueSet* work_queue_set_,
-                                      GrowableArray<oop>** overflow_stack_set_,
+                                      Stack<oop>* overflow_stacks_,
                                       size_t desired_plab_sz_,
                                       ParallelTaskTerminator& term_) :
  _to_space(to_space_), _old_gen(old_gen_), _young_gen(gen_), _thread_num(thread_num_),
  _work_queue(work_queue_set_->queue(thread_num_)), _to_space_full(false),
- _overflow_stack(overflow_stack_set_[thread_num_]),
+ _overflow_stack(overflow_stacks_ ? overflow_stacks_ + thread_num_ : NULL),
  _ageTable(false), // false ==> not the global age table, no perf data.
  _to_space_alloc_buffer(desired_plab_sz_),
  _to_space_closure(gen_, this), _old_gen_closure(gen_, this),
@@ -159,11 +159,12 @@ bool ParScanThreadState::take_from_overflow_stack() {
  assert(ParGCUseLocalOverflow, "Else should not call");
  assert(young_gen()->overflow_list() == NULL, "Error");
  ObjToScanQueue* queue = work_queue();
- GrowableArray<oop>* of_stack = overflow_stack();
- uint num_overflow_elems = of_stack->length();
- uint num_take_elems = MIN2(MIN2((queue->max_elems() - queue->size())/4,
-                                 (juint)ParGCDesiredObjsFromOverflowList),
-                            num_overflow_elems);
+ Stack<oop>* const of_stack = overflow_stack();
+ const size_t num_overflow_elems = of_stack->size();
+ const size_t space_available = queue->max_elems() - queue->size();
+ const size_t num_take_elems = MIN3(space_available / 4,
+                                    ParGCDesiredObjsFromOverflowList,
+                                    num_overflow_elems);
  // Transfer the most recent num_take_elems from the overflow
  // stack to our work queue.
  for (size_t i = 0; i != num_take_elems; i++) {
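A worked example of the new sizing computation, using made-up numbers (neither the queue capacity nor the flag default is quoted from the sources):

    //   queue->max_elems() = 16384, queue->size() = 15000  -> space_available = 1384
    //   space_available / 4                                 = 346
    //   ParGCDesiredObjsFromOverflowList (say)              = 20
    //   num_overflow_elems on this worker's private stack   = 50
    //   num_take_elems = MIN3(346, 20, 50)                  = 20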
@ -271,7 +272,7 @@ public:
ParNewGeneration& gen, ParNewGeneration& gen,
Generation& old_gen, Generation& old_gen,
ObjToScanQueueSet& queue_set, ObjToScanQueueSet& queue_set,
GrowableArray<oop>** overflow_stacks_, Stack<oop>* overflow_stacks_,
size_t desired_plab_sz, size_t desired_plab_sz,
ParallelTaskTerminator& term); ParallelTaskTerminator& term);
@ -302,17 +303,19 @@ private:
ParScanThreadStateSet::ParScanThreadStateSet( ParScanThreadStateSet::ParScanThreadStateSet(
int num_threads, Space& to_space, ParNewGeneration& gen, int num_threads, Space& to_space, ParNewGeneration& gen,
Generation& old_gen, ObjToScanQueueSet& queue_set, Generation& old_gen, ObjToScanQueueSet& queue_set,
GrowableArray<oop>** overflow_stack_set_, Stack<oop>* overflow_stacks,
size_t desired_plab_sz, ParallelTaskTerminator& term) size_t desired_plab_sz, ParallelTaskTerminator& term)
: ResourceArray(sizeof(ParScanThreadState), num_threads), : ResourceArray(sizeof(ParScanThreadState), num_threads),
_gen(gen), _next_gen(old_gen), _term(term) _gen(gen), _next_gen(old_gen), _term(term)
{ {
assert(num_threads > 0, "sanity check!"); assert(num_threads > 0, "sanity check!");
assert(ParGCUseLocalOverflow == (overflow_stacks != NULL),
"overflow_stack allocation mismatch");
// Initialize states. // Initialize states.
for (int i = 0; i < num_threads; ++i) { for (int i = 0; i < num_threads; ++i) {
new ((ParScanThreadState*)_data + i) new ((ParScanThreadState*)_data + i)
ParScanThreadState(&to_space, &gen, &old_gen, i, &queue_set, ParScanThreadState(&to_space, &gen, &old_gen, i, &queue_set,
overflow_stack_set_, desired_plab_sz, term); overflow_stacks, desired_plab_sz, term);
} }
} }
@@ -596,14 +599,11 @@ ParNewGeneration(ReservedSpace rs, size_t initial_byte_size, int level)
  for (uint i2 = 0; i2 < ParallelGCThreads; i2++)
    _task_queues->queue(i2)->initialize();
- _overflow_stacks = NEW_C_HEAP_ARRAY(GrowableArray<oop>*, ParallelGCThreads);
- guarantee(_overflow_stacks != NULL, "Overflow stack set allocation failure");
- for (uint i = 0; i < ParallelGCThreads; i++) {
-   if (ParGCUseLocalOverflow) {
-     _overflow_stacks[i] = new (ResourceObj::C_HEAP) GrowableArray<oop>(512, true);
-     guarantee(_overflow_stacks[i] != NULL, "Overflow Stack allocation failure.");
-   } else {
-     _overflow_stacks[i] = NULL;
-   }
- }
+ _overflow_stacks = NULL;
+ if (ParGCUseLocalOverflow) {
+   _overflow_stacks = NEW_C_HEAP_ARRAY(Stack<oop>, ParallelGCThreads);
+   for (size_t i = 0; i < ParallelGCThreads; ++i) {
+     new (_overflow_stacks + i) Stack<oop>();
+   }
+ }
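The new code carves out raw C-heap storage and then placement-constructs a Stack<oop> in each slot, presumably because Stack<oop> needs real construction. A generic C++ sketch of that allocate-then-construct pattern (malloc/free stand in for the VM's C-heap macros):

    #include <cstdlib>
    #include <new>

    // Allocate uninitialized storage for n non-POD objects, then run each
    // constructor in place. Destruction must be done explicitly as well.
    template <typename T>
    T* allocate_and_construct(size_t n) {
      T* raw = static_cast<T*>(std::malloc(n * sizeof(T)));  // like NEW_C_HEAP_ARRAY
      for (size_t i = 0; i < n; ++i) {
        new (raw + i) T();                                    // placement-construct slot i
      }
      return raw;
    }

    template <typename T>
    void destroy_and_free(T* p, size_t n) {
      for (size_t i = 0; i < n; ++i) p[i].~T();               // explicit destructor calls
      std::free(p);
    }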
@ -937,12 +937,9 @@ void ParNewGeneration::collect(bool full,
} else { } else {
assert(HandlePromotionFailure, assert(HandlePromotionFailure,
"Should only be here if promotion failure handling is on"); "Should only be here if promotion failure handling is on");
if (_promo_failure_scan_stack != NULL) { assert(_promo_failure_scan_stack.is_empty(), "post condition");
// Can be non-null because of reference processing. _promo_failure_scan_stack.clear(true); // Clear cached segments.
// Free stack with its elements.
delete _promo_failure_scan_stack;
_promo_failure_scan_stack = NULL;
}
remove_forwarding_pointers(); remove_forwarding_pointers();
if (PrintGCDetails) { if (PrintGCDetails) {
gclog_or_tty->print(" (promotion failed)"); gclog_or_tty->print(" (promotion failed)");
@ -1397,8 +1394,8 @@ bool ParNewGeneration::take_from_overflow_list_work(ParScanThreadState* par_scan
size_t objsFromOverflow = MIN2((size_t)(work_q->max_elems() - work_q->size())/4, size_t objsFromOverflow = MIN2((size_t)(work_q->max_elems() - work_q->size())/4,
(size_t)ParGCDesiredObjsFromOverflowList); (size_t)ParGCDesiredObjsFromOverflowList);
assert(par_scan_state->overflow_stack() == NULL, "Error");
assert(!UseCompressedOops, "Error"); assert(!UseCompressedOops, "Error");
assert(par_scan_state->overflow_stack() == NULL, "Error");
if (_overflow_list == NULL) return false; if (_overflow_list == NULL) return false;
// Otherwise, there was something there; try claiming the list. // Otherwise, there was something there; try claiming the list.
@ -1533,3 +1530,7 @@ void ParNewGeneration::ref_processor_init()
const char* ParNewGeneration::name() const { const char* ParNewGeneration::name() const {
return "par new generation"; return "par new generation";
} }
bool ParNewGeneration::in_use() {
return UseParNewGC && ParallelGCThreads > 0;
}


@ -52,7 +52,7 @@ class ParScanThreadState {
friend class ParScanThreadStateSet; friend class ParScanThreadStateSet;
private: private:
ObjToScanQueue *_work_queue; ObjToScanQueue *_work_queue;
GrowableArray<oop>* _overflow_stack; Stack<oop>* const _overflow_stack;
ParGCAllocBuffer _to_space_alloc_buffer; ParGCAllocBuffer _to_space_alloc_buffer;
@ -120,7 +120,7 @@ class ParScanThreadState {
ParScanThreadState(Space* to_space_, ParNewGeneration* gen_, ParScanThreadState(Space* to_space_, ParNewGeneration* gen_,
Generation* old_gen_, int thread_num_, Generation* old_gen_, int thread_num_,
ObjToScanQueueSet* work_queue_set_, ObjToScanQueueSet* work_queue_set_,
GrowableArray<oop>** overflow_stack_set_, Stack<oop>* overflow_stacks_,
size_t desired_plab_sz_, size_t desired_plab_sz_,
ParallelTaskTerminator& term_); ParallelTaskTerminator& term_);
@ -144,7 +144,7 @@ class ParScanThreadState {
void trim_queues(int max_size); void trim_queues(int max_size);
// Private overflow stack usage // Private overflow stack usage
GrowableArray<oop>* overflow_stack() { return _overflow_stack; } Stack<oop>* overflow_stack() { return _overflow_stack; }
bool take_from_overflow_stack(); bool take_from_overflow_stack();
void push_on_overflow_stack(oop p); void push_on_overflow_stack(oop p);
@ -301,7 +301,7 @@ class ParNewGeneration: public DefNewGeneration {
ObjToScanQueueSet* _task_queues; ObjToScanQueueSet* _task_queues;
// Per-worker-thread local overflow stacks // Per-worker-thread local overflow stacks
GrowableArray<oop>** _overflow_stacks; Stack<oop>* _overflow_stacks;
// Desired size of survivor space plab's // Desired size of survivor space plab's
PLABStats _plab_stats; PLABStats _plab_stats;
@ -350,6 +350,8 @@ class ParNewGeneration: public DefNewGeneration {
delete _task_queues; delete _task_queues;
} }
static bool in_use();
virtual void ref_processor_init(); virtual void ref_processor_init();
virtual Generation::Name kind() { return Generation::ParNew; } virtual Generation::Name kind() { return Generation::ParNew; }
virtual const char* name() const; virtual const char* name() const;


@ -59,8 +59,6 @@ void MarkFromRootsTask::do_it(GCTaskManager* manager, uint which) {
PrintGCDetails && TraceParallelOldGCTasks, true, gclog_or_tty)); PrintGCDetails && TraceParallelOldGCTasks, true, gclog_or_tty));
ParCompactionManager* cm = ParCompactionManager* cm =
ParCompactionManager::gc_thread_compaction_manager(which); ParCompactionManager::gc_thread_compaction_manager(which);
assert(cm->stacks_have_been_allocated(),
"Stack space has not been allocated");
PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm); PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm);
switch (_root_type) { switch (_root_type) {
@ -119,7 +117,6 @@ void MarkFromRootsTask::do_it(GCTaskManager* manager, uint which) {
// Do the real work // Do the real work
cm->follow_marking_stacks(); cm->follow_marking_stacks();
// cm->deallocate_stacks();
} }
@ -135,8 +132,6 @@ void RefProcTaskProxy::do_it(GCTaskManager* manager, uint which)
PrintGCDetails && TraceParallelOldGCTasks, true, gclog_or_tty)); PrintGCDetails && TraceParallelOldGCTasks, true, gclog_or_tty));
ParCompactionManager* cm = ParCompactionManager* cm =
ParCompactionManager::gc_thread_compaction_manager(which); ParCompactionManager::gc_thread_compaction_manager(which);
assert(cm->stacks_have_been_allocated(),
"Stack space has not been allocated");
PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm); PSParallelCompact::MarkAndPushClosure mark_and_push_closure(cm);
PSParallelCompact::FollowStackClosure follow_stack_closure(cm); PSParallelCompact::FollowStackClosure follow_stack_closure(cm);
_rp_task.work(_work_id, *PSParallelCompact::is_alive_closure(), _rp_task.work(_work_id, *PSParallelCompact::is_alive_closure(),


@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2005, 2008, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -242,7 +242,11 @@ class UpdateDensePrefixTask : public GCTask {
// //
class DrainStacksCompactionTask : public GCTask { class DrainStacksCompactionTask : public GCTask {
uint _stack_index;
uint stack_index() { return _stack_index; }
public: public:
DrainStacksCompactionTask(uint stack_index) : GCTask(),
_stack_index(stack_index) {};
char* name() { return (char *)"drain-region-task"; } char* name() { return (char *)"drain-region-task"; }
virtual void do_it(GCTaskManager* manager, uint which); virtual void do_it(GCTaskManager* manager, uint which);
}; };


@ -46,23 +46,6 @@ ParCompactionManager::ParCompactionManager() :
marking_stack()->initialize(); marking_stack()->initialize();
_objarray_stack.initialize(); _objarray_stack.initialize();
region_stack()->initialize(); region_stack()->initialize();
// Note that _revisit_klass_stack is allocated out of the
// C heap (as opposed to out of ResourceArena).
int size =
(SystemDictionary::number_of_classes() * 2) * 2 / ParallelGCThreads;
_revisit_klass_stack = new (ResourceObj::C_HEAP) GrowableArray<Klass*>(size, true);
// From some experiments (#klass/k)^2 for k = 10 seems a better fit, but this will
// have to do for now until we are able to investigate a more optimal setting.
_revisit_mdo_stack = new (ResourceObj::C_HEAP) GrowableArray<DataLayout*>(size*2, true);
}
ParCompactionManager::~ParCompactionManager() {
delete _revisit_klass_stack;
delete _revisit_mdo_stack;
// _manager_array and _stack_array are statics
// shared with all instances of ParCompactionManager
// should not be deallocated.
} }
void ParCompactionManager::initialize(ParMarkBitMap* mbm) { void ParCompactionManager::initialize(ParMarkBitMap* mbm) {
@ -134,9 +117,9 @@ ParCompactionManager::gc_thread_compaction_manager(int index) {
} }
void ParCompactionManager::reset() { void ParCompactionManager::reset() {
for(uint i=0; i<ParallelGCThreads+1; i++) { for(uint i = 0; i < ParallelGCThreads + 1; i++) {
manager_array(i)->revisit_klass_stack()->clear(); assert(manager_array(i)->revisit_klass_stack()->is_empty(), "sanity");
manager_array(i)->revisit_mdo_stack()->clear(); assert(manager_array(i)->revisit_mdo_stack()->is_empty(), "sanity");
} }
} }
@ -178,10 +161,3 @@ void ParCompactionManager::drain_region_stacks() {
} }
} while (!region_stack()->is_empty()); } while (!region_stack()->is_empty());
} }
#ifdef ASSERT
bool ParCompactionManager::stacks_have_been_allocated() {
return (revisit_klass_stack()->data_addr() != NULL &&
revisit_mdo_stack()->data_addr() != NULL);
}
#endif


@ -80,10 +80,9 @@ private:
// type of TaskQueue. // type of TaskQueue.
RegionTaskQueue _region_stack; RegionTaskQueue _region_stack;
#if 1 // does this happen enough to need a per thread stack? Stack<Klass*> _revisit_klass_stack;
GrowableArray<Klass*>* _revisit_klass_stack; Stack<DataLayout*> _revisit_mdo_stack;
GrowableArray<DataLayout*>* _revisit_mdo_stack;
#endif
static ParMarkBitMap* _mark_bitmap; static ParMarkBitMap* _mark_bitmap;
Action _action; Action _action;
@ -113,10 +112,7 @@ private:
inline static ParCompactionManager* manager_array(int index); inline static ParCompactionManager* manager_array(int index);
ParCompactionManager(); ParCompactionManager();
~ParCompactionManager();
void allocate_stacks();
void deallocate_stacks();
ParMarkBitMap* mark_bitmap() { return _mark_bitmap; } ParMarkBitMap* mark_bitmap() { return _mark_bitmap; }
// Take actions in preparation for a compaction. // Take actions in preparation for a compaction.
@ -129,11 +125,8 @@ private:
bool should_verify_only(); bool should_verify_only();
bool should_reset_only(); bool should_reset_only();
#if 1 Stack<Klass*>* revisit_klass_stack() { return &_revisit_klass_stack; }
// Probably stays as a growable array Stack<DataLayout*>* revisit_mdo_stack() { return &_revisit_mdo_stack; }
GrowableArray<Klass*>* revisit_klass_stack() { return _revisit_klass_stack; }
GrowableArray<DataLayout*>* revisit_mdo_stack() { return _revisit_mdo_stack; }
#endif
// Save for later processing. Must not fail. // Save for later processing. Must not fail.
inline void push(oop obj) { _marking_stack.push(obj); } inline void push(oop obj) { _marking_stack.push(obj); }
@ -162,10 +155,6 @@ private:
// Process tasks remaining on any stack // Process tasks remaining on any stack
void drain_region_stacks(); void drain_region_stacks();
// Debugging support
#ifdef ASSERT
bool stacks_have_been_allocated();
#endif
}; };
inline ParCompactionManager* ParCompactionManager::manager_array(int index) { inline ParCompactionManager* ParCompactionManager::manager_array(int index) {


@ -466,33 +466,16 @@ void PSMarkSweep::allocate_stacks() {
_preserved_count_max = pointer_delta(to_space->end(), to_space->top(), sizeof(jbyte)); _preserved_count_max = pointer_delta(to_space->end(), to_space->top(), sizeof(jbyte));
// Now divide by the size of a PreservedMark // Now divide by the size of a PreservedMark
_preserved_count_max /= sizeof(PreservedMark); _preserved_count_max /= sizeof(PreservedMark);
_preserved_mark_stack = NULL;
_preserved_oop_stack = NULL;
_marking_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
_objarray_stack = new (ResourceObj::C_HEAP) GrowableArray<ObjArrayTask>(50, true);
int size = SystemDictionary::number_of_classes() * 2;
_revisit_klass_stack = new (ResourceObj::C_HEAP) GrowableArray<Klass*>(size, true);
// (#klass/k)^2, for k ~ 10 appears a better setting, but this will have to do for
// now until we investigate a more optimal setting.
_revisit_mdo_stack = new (ResourceObj::C_HEAP) GrowableArray<DataLayout*>(size*2, true);
} }
void PSMarkSweep::deallocate_stacks() { void PSMarkSweep::deallocate_stacks() {
if (_preserved_oop_stack) { _preserved_mark_stack.clear(true);
delete _preserved_mark_stack; _preserved_oop_stack.clear(true);
_preserved_mark_stack = NULL; _marking_stack.clear();
delete _preserved_oop_stack; _objarray_stack.clear(true);
_preserved_oop_stack = NULL; _revisit_klass_stack.clear(true);
} _revisit_mdo_stack.clear(true);
delete _marking_stack;
delete _objarray_stack;
delete _revisit_klass_stack;
delete _revisit_mdo_stack;
} }
void PSMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) { void PSMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
@ -542,17 +525,17 @@ void PSMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
// Update subklass/sibling/implementor links of live klasses // Update subklass/sibling/implementor links of live klasses
follow_weak_klass_links(); follow_weak_klass_links();
assert(_marking_stack->is_empty(), "just drained"); assert(_marking_stack.is_empty(), "just drained");
// Visit memoized mdo's and clear unmarked weak refs // Visit memoized mdo's and clear unmarked weak refs
follow_mdo_weak_refs(); follow_mdo_weak_refs();
assert(_marking_stack->is_empty(), "just drained"); assert(_marking_stack.is_empty(), "just drained");
// Visit symbol and interned string tables and delete unmarked oops // Visit symbol and interned string tables and delete unmarked oops
SymbolTable::unlink(is_alive_closure()); SymbolTable::unlink(is_alive_closure());
StringTable::unlink(is_alive_closure()); StringTable::unlink(is_alive_closure());
assert(_marking_stack->is_empty(), "stack should be empty by now"); assert(_marking_stack.is_empty(), "stack should be empty by now");
} }

View File

@ -2170,6 +2170,16 @@ void PSParallelCompact::invoke_no_policy(bool maximum_heap_compaction) {
heap->update_counters(); heap->update_counters();
} }
#ifdef ASSERT
for (size_t i = 0; i < ParallelGCThreads + 1; ++i) {
ParCompactionManager* const cm =
ParCompactionManager::manager_array(int(i));
assert(cm->marking_stack()->is_empty(), "should be empty");
assert(cm->region_stack()->is_empty(), "should be empty");
assert(cm->revisit_klass_stack()->is_empty(), "should be empty");
}
#endif // ASSERT
if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) { if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) {
HandleMark hm; // Discard invalid handles created during verification HandleMark hm; // Discard invalid handles created during verification
gclog_or_tty->print(" VerifyAfterGC:"); gclog_or_tty->print(" VerifyAfterGC:");
@ -2449,7 +2459,7 @@ void PSParallelCompact::enqueue_region_draining_tasks(GCTaskQueue* q,
const unsigned int task_count = MAX2(parallel_gc_threads, 1U); const unsigned int task_count = MAX2(parallel_gc_threads, 1U);
for (unsigned int j = 0; j < task_count; j++) { for (unsigned int j = 0; j < task_count; j++) {
q->enqueue(new DrainStacksCompactionTask()); q->enqueue(new DrainStacksCompactionTask(j));
} }
// Find all regions that are available (can be filled immediately) and // Find all regions that are available (can be filled immediately) and
@ -2711,21 +2721,22 @@ PSParallelCompact::follow_weak_klass_links() {
// All klasses on the revisit stack are marked at this point. // All klasses on the revisit stack are marked at this point.
// Update and follow all subklass, sibling and implementor links. // Update and follow all subklass, sibling and implementor links.
if (PrintRevisitStats) { if (PrintRevisitStats) {
gclog_or_tty->print_cr("#classes in system dictionary = %d", SystemDictionary::number_of_classes()); gclog_or_tty->print_cr("#classes in system dictionary = %d",
SystemDictionary::number_of_classes());
} }
for (uint i = 0; i < ParallelGCThreads + 1; i++) { for (uint i = 0; i < ParallelGCThreads + 1; i++) {
ParCompactionManager* cm = ParCompactionManager::manager_array(i); ParCompactionManager* cm = ParCompactionManager::manager_array(i);
KeepAliveClosure keep_alive_closure(cm); KeepAliveClosure keep_alive_closure(cm);
int length = cm->revisit_klass_stack()->length(); Stack<Klass*>* const rks = cm->revisit_klass_stack();
if (PrintRevisitStats) { if (PrintRevisitStats) {
gclog_or_tty->print_cr("Revisit klass stack[%d] length = %d", i, length); gclog_or_tty->print_cr("Revisit klass stack[%u] length = " SIZE_FORMAT,
i, rks->size());
} }
for (int j = 0; j < length; j++) { while (!rks->is_empty()) {
cm->revisit_klass_stack()->at(j)->follow_weak_klass_links( Klass* const k = rks->pop();
is_alive_closure(), k->follow_weak_klass_links(is_alive_closure(), &keep_alive_closure);
&keep_alive_closure);
} }
// revisit_klass_stack is cleared in reset()
cm->follow_marking_stacks(); cm->follow_marking_stacks();
} }
} }
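Editor's note: the loop rewrite above swaps indexed iteration over a GrowableArray for a drain loop that pops until the stack is empty, which is also why the stack no longer needs to be cleared in reset(). A minimal sketch of the drain pattern, with KlassStub and process_klass as placeholders for Klass and follow_weak_klass_links, and std::stack standing in for HotSpot's Stack<T>:

    #include <cstdio>
    #include <stack>

    struct KlassStub { int id; };              // stand-in for Klass
    static void process_klass(KlassStub* k) {  // stand-in for follow_weak_klass_links
      std::printf("revisit klass %d\n", k->id);
    }

    int main() {
      std::stack<KlassStub*> revisit;          // stand-in for cm->revisit_klass_stack()
      KlassStub a = {1}, b = {2};
      revisit.push(&a);
      revisit.push(&b);
      // Drain loop: the stack is empty when the loop exits, so no separate clear().
      while (!revisit.empty()) {
        KlassStub* k = revisit.top();
        revisit.pop();
        process_klass(k);
      }
      return 0;
    }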
@ -2744,19 +2755,20 @@ void PSParallelCompact::follow_mdo_weak_refs() {
// we can visit and clear any weak references from MDO's which // we can visit and clear any weak references from MDO's which
// we memoized during the strong marking phase. // we memoized during the strong marking phase.
if (PrintRevisitStats) { if (PrintRevisitStats) {
gclog_or_tty->print_cr("#classes in system dictionary = %d", SystemDictionary::number_of_classes()); gclog_or_tty->print_cr("#classes in system dictionary = %d",
SystemDictionary::number_of_classes());
} }
for (uint i = 0; i < ParallelGCThreads + 1; i++) { for (uint i = 0; i < ParallelGCThreads + 1; i++) {
ParCompactionManager* cm = ParCompactionManager::manager_array(i); ParCompactionManager* cm = ParCompactionManager::manager_array(i);
GrowableArray<DataLayout*>* rms = cm->revisit_mdo_stack(); Stack<DataLayout*>* rms = cm->revisit_mdo_stack();
int length = rms->length();
if (PrintRevisitStats) { if (PrintRevisitStats) {
gclog_or_tty->print_cr("Revisit MDO stack[%d] length = %d", i, length); gclog_or_tty->print_cr("Revisit MDO stack[%u] size = " SIZE_FORMAT,
i, rms->size());
} }
for (int j = 0; j < length; j++) { while (!rms->is_empty()) {
rms->at(j)->follow_weak_refs(is_alive_closure()); rms->pop()->follow_weak_refs(is_alive_closure());
} }
// revisit_mdo_stack is cleared in reset()
cm->follow_marking_stacks(); cm->follow_marking_stacks();
} }
} }

View File

@ -185,7 +185,6 @@ void PSPromotionManager::reset() {
void PSPromotionManager::drain_stacks_depth(bool totally_drain) { void PSPromotionManager::drain_stacks_depth(bool totally_drain) {
assert(claimed_stack_depth()->overflow_stack() != NULL, "invariant");
totally_drain = totally_drain || _totally_drain; totally_drain = totally_drain || _totally_drain;
#ifdef ASSERT #ifdef ASSERT

View File

@ -34,9 +34,10 @@ bool PSScavenge::_survivor_overflow = false;
int PSScavenge::_tenuring_threshold = 0; int PSScavenge::_tenuring_threshold = 0;
HeapWord* PSScavenge::_young_generation_boundary = NULL; HeapWord* PSScavenge::_young_generation_boundary = NULL;
elapsedTimer PSScavenge::_accumulated_time; elapsedTimer PSScavenge::_accumulated_time;
GrowableArray<markOop>* PSScavenge::_preserved_mark_stack = NULL; Stack<markOop> PSScavenge::_preserved_mark_stack;
GrowableArray<oop>* PSScavenge::_preserved_oop_stack = NULL; Stack<oop> PSScavenge::_preserved_oop_stack;
CollectorCounters* PSScavenge::_counters = NULL; CollectorCounters* PSScavenge::_counters = NULL;
bool PSScavenge::_promotion_failed = false;
// Define before use // Define before use
class PSIsAliveClosure: public BoolObjectClosure { class PSIsAliveClosure: public BoolObjectClosure {
@ -223,6 +224,9 @@ bool PSScavenge::invoke_no_policy() {
assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint"); assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
assert(Thread::current() == (Thread*)VMThread::vm_thread(), "should be in vm thread"); assert(Thread::current() == (Thread*)VMThread::vm_thread(), "should be in vm thread");
assert(_preserved_mark_stack.is_empty(), "should be empty");
assert(_preserved_oop_stack.is_empty(), "should be empty");
TimeStamp scavenge_entry; TimeStamp scavenge_entry;
TimeStamp scavenge_midpoint; TimeStamp scavenge_midpoint;
TimeStamp scavenge_exit; TimeStamp scavenge_exit;
@ -636,24 +640,20 @@ void PSScavenge::clean_up_failed_promotion() {
young_gen->object_iterate(&unforward_closure); young_gen->object_iterate(&unforward_closure);
if (PrintGC && Verbose) { if (PrintGC && Verbose) {
gclog_or_tty->print_cr("Restoring %d marks", gclog_or_tty->print_cr("Restoring %d marks", _preserved_oop_stack.size());
_preserved_oop_stack->length());
} }
// Restore any saved marks. // Restore any saved marks.
for (int i=0; i < _preserved_oop_stack->length(); i++) { while (!_preserved_oop_stack.is_empty()) {
oop obj = _preserved_oop_stack->at(i); oop obj = _preserved_oop_stack.pop();
markOop mark = _preserved_mark_stack->at(i); markOop mark = _preserved_mark_stack.pop();
obj->set_mark(mark); obj->set_mark(mark);
} }
// Deallocate the preserved mark and oop stacks. // Clear the preserved mark and oop stack caches.
// The stacks were allocated as CHeap objects, so _preserved_mark_stack.clear(true);
// we must call delete to prevent mem leaks. _preserved_oop_stack.clear(true);
delete _preserved_mark_stack; _promotion_failed = false;
_preserved_mark_stack = NULL;
delete _preserved_oop_stack;
_preserved_oop_stack = NULL;
} }
// Reset the PromotionFailureALot counters. // Reset the PromotionFailureALot counters.
@ -661,27 +661,16 @@ void PSScavenge::clean_up_failed_promotion() {
} }
// This method is called whenever an attempt to promote an object // This method is called whenever an attempt to promote an object
// fails. Some markOops will need preserving, some will not. Note // fails. Some markOops will need preservation, some will not. Note
// that the entire eden is traversed after a failed promotion, with // that the entire eden is traversed after a failed promotion, with
// all forwarded headers replaced by the default markOop. This means // all forwarded headers replaced by the default markOop. This means
// it is not necessary to preserve most markOops. // it is not necessary to preserve most markOops.

void PSScavenge::oop_promotion_failed(oop obj, markOop obj_mark) { void PSScavenge::oop_promotion_failed(oop obj, markOop obj_mark) {
if (_preserved_mark_stack == NULL) { _promotion_failed = true;
ThreadCritical tc; // Lock and retest
if (_preserved_mark_stack == NULL) {
assert(_preserved_oop_stack == NULL, "Sanity");
_preserved_mark_stack = new (ResourceObj::C_HEAP) GrowableArray<markOop>(40, true);
_preserved_oop_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(40, true);
}
}
// Because we must hold the ThreadCritical lock before using
// the stacks, we should be safe from observing partial allocations,
// which are also guarded by the ThreadCritical lock.
if (obj_mark->must_be_preserved_for_promotion_failure(obj)) { if (obj_mark->must_be_preserved_for_promotion_failure(obj)) {
ThreadCritical tc; ThreadCritical tc;
_preserved_oop_stack->push(obj); _preserved_oop_stack.push(obj);
_preserved_mark_stack->push(obj_mark); _preserved_mark_stack.push(obj_mark);
} }
} }
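Editor's note: the rewrite above drops the lazily allocated, double-checked stacks and instead records failure in a boolean while pushing onto always-present stacks inside the same critical section. A small standard-library sketch of that shape; std::mutex stands in for ThreadCritical, MarkWord and ObjStub are placeholders for markOop and oop, and the must_preserve flag stands in for must_be_preserved_for_promotion_failure():

    #include <cstdint>
    #include <mutex>
    #include <stack>

    typedef std::uintptr_t MarkWord;              // placeholder for markOop
    struct ObjStub {};                            // placeholder for oop

    static std::stack<ObjStub*> preserved_oops;   // always constructed, never NULL
    static std::stack<MarkWord> preserved_marks;
    static bool                 promotion_failed_flag = false;
    static std::mutex           preserve_lock;    // stand-in for ThreadCritical

    // Mirrors the new oop_promotion_failed(): set the flag, then push the object
    // and its mark under the lock only when the mark actually needs preserving.
    void oop_promotion_failed(ObjStub* obj, MarkWord mark, bool must_preserve) {
      promotion_failed_flag = true;
      if (must_preserve) {
        std::lock_guard<std::mutex> tc(preserve_lock);
        preserved_oops.push(obj);
        preserved_marks.push(mark);
      }
    }

    // Mirrors the accessor change: a flag instead of a NULL check on a stack pointer.
    bool promotion_failed() { return promotion_failed_flag; }

    int main() {
      ObjStub o;
      oop_promotion_failed(&o, MarkWord(0x5), true);
      return promotion_failed() ? 0 : 1;
    }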

View File

@ -61,9 +61,10 @@ class PSScavenge: AllStatic {
static HeapWord* _young_generation_boundary; // The lowest address possible for the young_gen. static HeapWord* _young_generation_boundary; // The lowest address possible for the young_gen.
// This is used to decide if an oop should be scavenged, // This is used to decide if an oop should be scavenged,
// cards should be marked, etc. // cards should be marked, etc.
static GrowableArray<markOop>* _preserved_mark_stack; // List of marks to be restored after failed promotion static Stack<markOop> _preserved_mark_stack; // List of marks to be restored after failed promotion
static GrowableArray<oop>* _preserved_oop_stack; // List of oops that need their mark restored. static Stack<oop> _preserved_oop_stack; // List of oops that need their mark restored.
static CollectorCounters* _counters; // collector performance counters static CollectorCounters* _counters; // collector performance counters
static bool _promotion_failed;
static void clean_up_failed_promotion(); static void clean_up_failed_promotion();
@ -79,8 +80,7 @@ class PSScavenge: AllStatic {
// Accessors // Accessors
static int tenuring_threshold() { return _tenuring_threshold; } static int tenuring_threshold() { return _tenuring_threshold; }
static elapsedTimer* accumulated_time() { return &_accumulated_time; } static elapsedTimer* accumulated_time() { return &_accumulated_time; }
static bool promotion_failed() static bool promotion_failed() { return _promotion_failed; }
{ return _preserved_mark_stack != NULL; }
static int consecutive_skipped_scavenges() static int consecutive_skipped_scavenges()
{ return _consecutive_skipped_scavenges; } { return _consecutive_skipped_scavenges; }

View File

@ -185,7 +185,7 @@ SurrogateLockerThread* SurrogateLockerThread::make(TRAPS) {
instanceKlassHandle klass (THREAD, k); instanceKlassHandle klass (THREAD, k);
instanceHandle thread_oop = klass->allocate_instance_handle(CHECK_NULL); instanceHandle thread_oop = klass->allocate_instance_handle(CHECK_NULL);
const char thread_name[] = "Surrogate Locker Thread (CMS)"; const char thread_name[] = "Surrogate Locker Thread (Concurrent GC)";
Handle string = java_lang_String::create_from_str(thread_name, CHECK_NULL); Handle string = java_lang_String::create_from_str(thread_name, CHECK_NULL);
// Initialize thread_oop to put it into the system threadGroup // Initialize thread_oop to put it into the system threadGroup

View File

@ -25,13 +25,13 @@
#include "incls/_precompiled.incl" #include "incls/_precompiled.incl"
#include "incls/_markSweep.cpp.incl" #include "incls/_markSweep.cpp.incl"
GrowableArray<oop>* MarkSweep::_marking_stack = NULL; Stack<oop> MarkSweep::_marking_stack;
GrowableArray<ObjArrayTask>* MarkSweep::_objarray_stack = NULL; Stack<DataLayout*> MarkSweep::_revisit_mdo_stack;
GrowableArray<Klass*>* MarkSweep::_revisit_klass_stack = NULL; Stack<Klass*> MarkSweep::_revisit_klass_stack;
GrowableArray<DataLayout*>* MarkSweep::_revisit_mdo_stack = NULL; Stack<ObjArrayTask> MarkSweep::_objarray_stack;
GrowableArray<oop>* MarkSweep::_preserved_oop_stack = NULL; Stack<oop> MarkSweep::_preserved_oop_stack;
GrowableArray<markOop>* MarkSweep::_preserved_mark_stack= NULL; Stack<markOop> MarkSweep::_preserved_mark_stack;
size_t MarkSweep::_preserved_count = 0; size_t MarkSweep::_preserved_count = 0;
size_t MarkSweep::_preserved_count_max = 0; size_t MarkSweep::_preserved_count_max = 0;
PreservedMark* MarkSweep::_preserved_marks = NULL; PreservedMark* MarkSweep::_preserved_marks = NULL;
@ -58,37 +58,42 @@ GrowableArray<size_t> * MarkSweep::_last_gc_live_oops_size = NULL;
#endif #endif
void MarkSweep::revisit_weak_klass_link(Klass* k) { void MarkSweep::revisit_weak_klass_link(Klass* k) {
_revisit_klass_stack->push(k); _revisit_klass_stack.push(k);
} }
void MarkSweep::follow_weak_klass_links() { void MarkSweep::follow_weak_klass_links() {
// All klasses on the revisit stack are marked at this point. // All klasses on the revisit stack are marked at this point.
// Update and follow all subklass, sibling and implementor links. // Update and follow all subklass, sibling and implementor links.
if (PrintRevisitStats) { if (PrintRevisitStats) {
gclog_or_tty->print_cr("#classes in system dictionary = %d", SystemDictionary::number_of_classes()); gclog_or_tty->print_cr("#classes in system dictionary = %d",
gclog_or_tty->print_cr("Revisit klass stack length = %d", _revisit_klass_stack->length()); SystemDictionary::number_of_classes());
gclog_or_tty->print_cr("Revisit klass stack size = " SIZE_FORMAT,
_revisit_klass_stack.size());
} }
for (int i = 0; i < _revisit_klass_stack->length(); i++) { while (!_revisit_klass_stack.is_empty()) {
_revisit_klass_stack->at(i)->follow_weak_klass_links(&is_alive,&keep_alive); Klass* const k = _revisit_klass_stack.pop();
k->follow_weak_klass_links(&is_alive, &keep_alive);
} }
follow_stack(); follow_stack();
} }
void MarkSweep::revisit_mdo(DataLayout* p) { void MarkSweep::revisit_mdo(DataLayout* p) {
_revisit_mdo_stack->push(p); _revisit_mdo_stack.push(p);
} }
void MarkSweep::follow_mdo_weak_refs() { void MarkSweep::follow_mdo_weak_refs() {
// All strongly reachable oops have been marked at this point; // All strongly reachable oops have been marked at this point;
// we can visit and clear any weak references from MDO's which // we can visit and clear any weak references from MDO's which
// we memoized during the strong marking phase. // we memoized during the strong marking phase.
assert(_marking_stack->is_empty(), "Marking stack should be empty"); assert(_marking_stack.is_empty(), "Marking stack should be empty");
if (PrintRevisitStats) { if (PrintRevisitStats) {
gclog_or_tty->print_cr("#classes in system dictionary = %d", SystemDictionary::number_of_classes()); gclog_or_tty->print_cr("#classes in system dictionary = %d",
gclog_or_tty->print_cr("Revisit MDO stack length = %d", _revisit_mdo_stack->length()); SystemDictionary::number_of_classes());
gclog_or_tty->print_cr("Revisit MDO stack size = " SIZE_FORMAT,
_revisit_mdo_stack.size());
} }
for (int i = 0; i < _revisit_mdo_stack->length(); i++) { while (!_revisit_mdo_stack.is_empty()) {
_revisit_mdo_stack->at(i)->follow_weak_refs(&is_alive); _revisit_mdo_stack.pop()->follow_weak_refs(&is_alive);
} }
follow_stack(); follow_stack();
} }
@ -106,41 +111,37 @@ void MarkSweep::MarkAndPushClosure::do_oop(narrowOop* p) { mark_and_push(p); }
void MarkSweep::follow_stack() { void MarkSweep::follow_stack() {
do { do {
while (!_marking_stack->is_empty()) { while (!_marking_stack.is_empty()) {
oop obj = _marking_stack->pop(); oop obj = _marking_stack.pop();
assert (obj->is_gc_marked(), "p must be marked"); assert (obj->is_gc_marked(), "p must be marked");
obj->follow_contents(); obj->follow_contents();
} }
// Process ObjArrays one at a time to avoid marking stack bloat. // Process ObjArrays one at a time to avoid marking stack bloat.
if (!_objarray_stack->is_empty()) { if (!_objarray_stack.is_empty()) {
ObjArrayTask task = _objarray_stack->pop(); ObjArrayTask task = _objarray_stack.pop();
objArrayKlass* const k = (objArrayKlass*)task.obj()->blueprint(); objArrayKlass* const k = (objArrayKlass*)task.obj()->blueprint();
k->oop_follow_contents(task.obj(), task.index()); k->oop_follow_contents(task.obj(), task.index());
} }
} while (!_marking_stack->is_empty() || !_objarray_stack->is_empty()); } while (!_marking_stack.is_empty() || !_objarray_stack.is_empty());
} }
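Editor's note: follow_stack() above interleaves two stacks: it fully drains the marking stack, then processes a single object-array chunk, and repeats until both are empty, which keeps partially scanned arrays from bloating the marking stack. A small sketch of that control flow with placeholder work items (Task is illustrative; std::stack stands in for both HotSpot stacks):

    #include <cstdio>
    #include <stack>

    struct Task { int id; };

    int main() {
      std::stack<Task> marking;    // stand-in for _marking_stack
      std::stack<Task> objarrays;  // stand-in for _objarray_stack
      Task t1 = {1}, a1 = {100}, a2 = {101};
      marking.push(t1);
      objarrays.push(a1);
      objarrays.push(a2);

      // Same shape as follow_stack(): drain marking fully, then one array chunk,
      // and loop until both stacks are empty.
      do {
        while (!marking.empty()) {
          Task t = marking.top(); marking.pop();
          std::printf("follow contents of %d\n", t.id);
        }
        if (!objarrays.empty()) {
          Task t = objarrays.top(); objarrays.pop();
          std::printf("scan one chunk of array %d\n", t.id);
          // In the real code, scanning a chunk may push more work onto either stack.
        }
      } while (!marking.empty() || !objarrays.empty());
      return 0;
    }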
MarkSweep::FollowStackClosure MarkSweep::follow_stack_closure; MarkSweep::FollowStackClosure MarkSweep::follow_stack_closure;
void MarkSweep::FollowStackClosure::do_void() { follow_stack(); } void MarkSweep::FollowStackClosure::do_void() { follow_stack(); }
// We preserve the mark which should be replaced at the end and the location that it // We preserve the mark which should be replaced at the end and the location
// will go. Note that the object that this markOop belongs to isn't currently at that // that it will go. Note that the object that this markOop belongs to isn't
// address but it will be after phase4 // currently at that address but it will be after phase4
void MarkSweep::preserve_mark(oop obj, markOop mark) { void MarkSweep::preserve_mark(oop obj, markOop mark) {
// we try to store preserved marks in the to space of the new generation since this // We try to store preserved marks in the to space of the new generation since
// is storage which should be available. Most of the time this should be sufficient // this is storage which should be available. Most of the time this should be
// space for the marks we need to preserve but if it isn't we fall back in using // sufficient space for the marks we need to preserve but if it isn't we fall
// GrowableArrays to keep track of the overflow. // back to using Stacks to keep track of the overflow.
if (_preserved_count < _preserved_count_max) { if (_preserved_count < _preserved_count_max) {
_preserved_marks[_preserved_count++].init(obj, mark); _preserved_marks[_preserved_count++].init(obj, mark);
} else { } else {
if (_preserved_mark_stack == NULL) { _preserved_mark_stack.push(mark);
_preserved_mark_stack = new (ResourceObj::C_HEAP) GrowableArray<markOop>(40, true); _preserved_oop_stack.push(obj);
_preserved_oop_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(40, true);
}
_preserved_mark_stack->push(mark);
_preserved_oop_stack->push(obj);
} }
} }
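Editor's note: the comment above describes a two-tier scheme: a fixed-size array carved out of to-space absorbs the common case, and only the overflow spills to the mark/oop stacks. A compact sketch of that policy, with a plain array and std::stack standing in for PreservedMark and HotSpot's Stack<T>; the tiny capacity and placeholder types are for illustration only:

    #include <cstddef>
    #include <cstdint>
    #include <stack>

    typedef std::uintptr_t MarkWord;                     // placeholder for markOop
    struct ObjStub {};                                   // placeholder for oop
    struct PreservedPair { ObjStub* obj; MarkWord mark; };

    static const std::size_t    preserved_max = 4;       // _preserved_count_max (tiny here)
    static PreservedPair        preserved[preserved_max];// stand-in for the to-space array
    static std::size_t          preserved_count = 0;
    static std::stack<MarkWord> overflow_marks;          // _preserved_mark_stack
    static std::stack<ObjStub*> overflow_oops;           // _preserved_oop_stack

    // Mirrors preserve_mark(): fast path into the fixed array, overflow to the stacks.
    void preserve_mark(ObjStub* obj, MarkWord mark) {
      if (preserved_count < preserved_max) {
        preserved[preserved_count].obj  = obj;
        preserved[preserved_count].mark = mark;
        ++preserved_count;
      } else {
        overflow_marks.push(mark);
        overflow_oops.push(obj);
      }
    }

    int main() {
      ObjStub objs[6];
      for (int i = 0; i < 6; ++i) preserve_mark(&objs[i], MarkWord(i));
      // The first 4 pairs land in the array, the last 2 in the overflow stacks.
      return 0;
    }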
@ -151,8 +152,7 @@ void MarkSweep::AdjustPointerClosure::do_oop(oop* p) { adjust_pointer(p, _
void MarkSweep::AdjustPointerClosure::do_oop(narrowOop* p) { adjust_pointer(p, _is_root); } void MarkSweep::AdjustPointerClosure::do_oop(narrowOop* p) { adjust_pointer(p, _is_root); }
void MarkSweep::adjust_marks() { void MarkSweep::adjust_marks() {
assert(_preserved_oop_stack == NULL || assert( _preserved_oop_stack.size() == _preserved_mark_stack.size(),
_preserved_oop_stack->length() == _preserved_mark_stack->length(),
"inconsistent preserved oop stacks"); "inconsistent preserved oop stacks");
// adjust the oops we saved earlier // adjust the oops we saved earlier
@ -161,21 +161,19 @@ void MarkSweep::adjust_marks() {
} }
// deal with the overflow stack // deal with the overflow stack
if (_preserved_oop_stack) { StackIterator<oop> iter(_preserved_oop_stack);
for (int i = 0; i < _preserved_oop_stack->length(); i++) { while (!iter.is_empty()) {
oop* p = _preserved_oop_stack->adr_at(i); oop* p = iter.next_addr();
adjust_pointer(p); adjust_pointer(p);
}
} }
} }
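Editor's note: adjust_marks() above must update each saved oop in place rather than pop it, because the stack has to survive until restore_marks(); that is what the StackIterator<oop>/next_addr() pairing is for. A rough equivalent using iterators over a std::deque, purely illustrative and not the HotSpot API:

    #include <cstdint>
    #include <deque>

    typedef std::uintptr_t OopStub;             // placeholder for oop

    static std::deque<OopStub> preserved_oops;  // stand-in for _preserved_oop_stack

    static void adjust_pointer(OopStub* p) {    // stand-in for MarkSweep::adjust_pointer
      *p += 0x1000;                             // pretend the object moved by one page
    }

    // Mirrors adjust_marks(): visit the address of every saved entry in place,
    // leaving the container intact for restore_marks() to pop later.
    void adjust_marks_sketch() {
      for (std::deque<OopStub>::iterator it = preserved_oops.begin();
           it != preserved_oops.end(); ++it) {
        adjust_pointer(&*it);
      }
    }

    int main() {
      preserved_oops.push_back(0x4000);
      preserved_oops.push_back(0x8000);
      adjust_marks_sketch();
      return 0;
    }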
void MarkSweep::restore_marks() { void MarkSweep::restore_marks() {
assert(_preserved_oop_stack == NULL || assert(_preserved_oop_stack.size() == _preserved_mark_stack.size(),
_preserved_oop_stack->length() == _preserved_mark_stack->length(),
"inconsistent preserved oop stacks"); "inconsistent preserved oop stacks");
if (PrintGC && Verbose) { if (PrintGC && Verbose) {
gclog_or_tty->print_cr("Restoring %d marks", _preserved_count + gclog_or_tty->print_cr("Restoring %d marks",
(_preserved_oop_stack ? _preserved_oop_stack->length() : 0)); _preserved_count + _preserved_oop_stack.size());
} }
// restore the marks we saved earlier // restore the marks we saved earlier
@ -184,12 +182,10 @@ void MarkSweep::restore_marks() {
} }
// deal with the overflow // deal with the overflow
if (_preserved_oop_stack) { while (!_preserved_oop_stack.is_empty()) {
for (int i = 0; i < _preserved_oop_stack->length(); i++) { oop obj = _preserved_oop_stack.pop();
oop obj = _preserved_oop_stack->at(i); markOop mark = _preserved_mark_stack.pop();
markOop mark = _preserved_mark_stack->at(i); obj->set_mark(mark);
obj->set_mark(mark);
}
} }
} }

View File

@ -104,23 +104,22 @@ class MarkSweep : AllStatic {
friend class KeepAliveClosure; friend class KeepAliveClosure;
friend class VM_MarkSweep; friend class VM_MarkSweep;
friend void marksweep_init(); friend void marksweep_init();
friend class DataLayout;
// //
// Vars // Vars
// //
protected: protected:
// Traversal stacks used during phase1 // Traversal stacks used during phase1
static GrowableArray<oop>* _marking_stack; static Stack<oop> _marking_stack;
static GrowableArray<ObjArrayTask>* _objarray_stack; static Stack<ObjArrayTask> _objarray_stack;
// Stack for live klasses to revisit at end of marking phase // Stack for live klasses to revisit at end of marking phase
static GrowableArray<Klass*>* _revisit_klass_stack; static Stack<Klass*> _revisit_klass_stack;
// Set (stack) of MDO's to revisit at end of marking phase // Set (stack) of MDO's to revisit at end of marking phase
static GrowableArray<DataLayout*>* _revisit_mdo_stack; static Stack<DataLayout*> _revisit_mdo_stack;
// Space for storing/restoring mark word // Space for storing/restoring mark word
static GrowableArray<markOop>* _preserved_mark_stack; static Stack<markOop> _preserved_mark_stack;
static GrowableArray<oop>* _preserved_oop_stack; static Stack<oop> _preserved_oop_stack;
static size_t _preserved_count; static size_t _preserved_count;
static size_t _preserved_count_max; static size_t _preserved_count_max;
static PreservedMark* _preserved_marks; static PreservedMark* _preserved_marks;

View File

@ -72,7 +72,7 @@ template <class T> inline void MarkSweep::mark_and_push(T* p) {
oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);
if (!obj->mark()->is_marked()) { if (!obj->mark()->is_marked()) {
mark_object(obj); mark_object(obj);
_marking_stack->push(obj); _marking_stack.push(obj);
} }
} }
} }
@ -80,7 +80,7 @@ template <class T> inline void MarkSweep::mark_and_push(T* p) {
void MarkSweep::push_objarray(oop obj, size_t index) { void MarkSweep::push_objarray(oop obj, size_t index) {
ObjArrayTask task(obj, index); ObjArrayTask task(obj, index);
assert(task.is_valid(), "bad ObjArrayTask"); assert(task.is_valid(), "bad ObjArrayTask");
_objarray_stack->push(task); _objarray_stack.push(task);
} }
template <class T> inline void MarkSweep::adjust_pointer(T* p, bool isroot) { template <class T> inline void MarkSweep::adjust_pointer(T* p, bool isroot) {

Some files were not shown because too many files have changed in this diff