J. Duke 2017-07-05 22:23:57 +02:00
commit e4b6aa6eea
997 changed files with 18927 additions and 13085 deletions

View File

@@ -384,3 +384,4 @@ d7f519b004254b19e384131d9f0d0e40e31a0fd3 jdk-9+137
 7dcf453eacae79ee86a6bcc75fd0b546fc99b48a jdk-9+139
 a5815c6098a241d3a1df64d22b84b3524e4a77df jdk-9+140
 f64afae7f1a5608e438585bbf0bc23785e69cba0 jdk-9+141
+2b3e5caafe3594ea507c37675c4d3086f415dc64 jdk-9+142

View File

@@ -759,6 +759,10 @@ AC_DEFUN([FLAGS_SETUP_COMPILER_FLAGS_FOR_JDK_HELPER],
 # on ppc we don't prevent gcc to omit frame pointer but do prevent strict aliasing
 $2CFLAGS_JDK="${$2CFLAGS_JDK} -fno-strict-aliasing"
 ;;
+s390 )
+$2COMMON_CCXXFLAGS_JDK="[$]$2COMMON_CCXXFLAGS_JDK -fno-omit-frame-pointer -mbackchain -march=z10"
+$2CFLAGS_JDK="${$2CFLAGS_JDK} -fno-strict-aliasing"
+;;
 * )
 $2COMMON_CCXXFLAGS_JDK="[$]$2COMMON_CCXXFLAGS_JDK -fno-omit-frame-pointer"
 $2CFLAGS_JDK="${$2CFLAGS_JDK} -fno-strict-aliasing"
@@ -940,6 +944,10 @@ AC_DEFUN([FLAGS_SETUP_COMPILER_FLAGS_FOR_JDK_HELPER],
 # Use Power8, this is the first CPU to support PPC64 LE with ELFv2 ABI.
 $2JVM_CFLAGS="[$]$2JVM_CFLAGS -mcpu=power7 -mtune=power8"
 fi
+elif test "x$OPENJDK_$1_CPU" = xs390x; then
+if test "x$OPENJDK_$1_OS" = xlinux; then
+$2JVM_CFLAGS="[$]$2JVM_CFLAGS -mbackchain -march=z10"
+fi
 fi
 if test "x$OPENJDK_$1_CPU_ENDIAN" = xlittle; then
@@ -999,6 +1007,7 @@ AC_DEFUN([FLAGS_SETUP_COMPILER_FLAGS_FOR_JDK_HELPER],
 # Setup some hard coded includes
 $2COMMON_CCXXFLAGS_JDK="[$]$2COMMON_CCXXFLAGS_JDK \
+-I\$(SUPPORT_OUTPUTDIR)/modules_include/java.base \
 -I${JDK_TOPDIR}/src/java.base/share/native/include \
 -I${JDK_TOPDIR}/src/java.base/$OPENJDK_$1_OS/native/include \
 -I${JDK_TOPDIR}/src/java.base/$OPENJDK_$1_OS_TYPE/native/include \
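The new s390 case pairs -fno-omit-frame-pointer with -mbackchain so that every frame keeps a pointer to its caller, which is what lets tools walk z/Architecture stacks without unwind tables. Below is a minimal, hypothetical sketch (illustrative C++ only, not JDK code; the Frame layout with the back chain in slot 0 is an assumption about the Linux on s390x ABI under -mbackchain):

    #include <cstdio>
    #include <cstdint>

    struct Frame {
      Frame* back_chain;   // assumed: caller's frame address stored at offset 0
    };

    // Follow the back chain the way a crash handler or profiler would.
    void walk_stack(Frame* fp, int max_depth) {
      for (int i = 0; fp != nullptr && i < max_depth; i++) {
        std::printf("frame %d at %p\n", i, static_cast<void*>(fp));
        fp = fp->back_chain;   // step to the caller's frame
      }
    }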

View File

@@ -5093,7 +5093,7 @@ VS_SDK_PLATFORM_NAME_2013=
 #CUSTOM_AUTOCONF_INCLUDE
 # Do not change or remove the following line, it is needed for consistency checks:
-DATE_WHEN_GENERATED=1477108079
+DATE_WHEN_GENERATED=1478079760
 ###############################################################################
 #
@@ -49840,6 +49840,10 @@ $as_echo "$supports" >&6; }
 # on ppc we don't prevent gcc to omit frame pointer but do prevent strict aliasing
 CFLAGS_JDK="${CFLAGS_JDK} -fno-strict-aliasing"
 ;;
+s390 )
+COMMON_CCXXFLAGS_JDK="$COMMON_CCXXFLAGS_JDK -fno-omit-frame-pointer -mbackchain -march=z10"
+CFLAGS_JDK="${CFLAGS_JDK} -fno-strict-aliasing"
+;;
 * )
 COMMON_CCXXFLAGS_JDK="$COMMON_CCXXFLAGS_JDK -fno-omit-frame-pointer"
 CFLAGS_JDK="${CFLAGS_JDK} -fno-strict-aliasing"
@@ -50122,6 +50126,10 @@ $as_echo "$as_me: GCC >= 6 detected; adding ${NO_DELETE_NULL_POINTER_CHECKS_CFLA
 # Use Power8, this is the first CPU to support PPC64 LE with ELFv2 ABI.
 JVM_CFLAGS="$JVM_CFLAGS -mcpu=power7 -mtune=power8"
 fi
+elif test "x$OPENJDK_TARGET_CPU" = xs390x; then
+if test "x$OPENJDK_TARGET_OS" = xlinux; then
+JVM_CFLAGS="$JVM_CFLAGS -mbackchain -march=z10"
+fi
 fi
 if test "x$OPENJDK_TARGET_CPU_ENDIAN" = xlittle; then
@@ -50270,6 +50278,7 @@ $as_echo "$as_me: GCC >= 6 detected; adding ${NO_DELETE_NULL_POINTER_CHECKS_CFLA
 # Setup some hard coded includes
 COMMON_CCXXFLAGS_JDK="$COMMON_CCXXFLAGS_JDK \
+-I\$(SUPPORT_OUTPUTDIR)/modules_include/java.base \
 -I${JDK_TOPDIR}/src/java.base/share/native/include \
 -I${JDK_TOPDIR}/src/java.base/$OPENJDK_TARGET_OS/native/include \
 -I${JDK_TOPDIR}/src/java.base/$OPENJDK_TARGET_OS_TYPE/native/include \
@@ -50655,6 +50664,10 @@ $as_echo "$supports" >&6; }
 # on ppc we don't prevent gcc to omit frame pointer but do prevent strict aliasing
 OPENJDK_BUILD_CFLAGS_JDK="${OPENJDK_BUILD_CFLAGS_JDK} -fno-strict-aliasing"
 ;;
+s390 )
+OPENJDK_BUILD_COMMON_CCXXFLAGS_JDK="$OPENJDK_BUILD_COMMON_CCXXFLAGS_JDK -fno-omit-frame-pointer -mbackchain -march=z10"
+OPENJDK_BUILD_CFLAGS_JDK="${OPENJDK_BUILD_CFLAGS_JDK} -fno-strict-aliasing"
+;;
 * )
 OPENJDK_BUILD_COMMON_CCXXFLAGS_JDK="$OPENJDK_BUILD_COMMON_CCXXFLAGS_JDK -fno-omit-frame-pointer"
 OPENJDK_BUILD_CFLAGS_JDK="${OPENJDK_BUILD_CFLAGS_JDK} -fno-strict-aliasing"
@@ -50937,6 +50950,10 @@ $as_echo "$as_me: GCC >= 6 detected; adding ${NO_DELETE_NULL_POINTER_CHECKS_CFLA
 # Use Power8, this is the first CPU to support PPC64 LE with ELFv2 ABI.
 OPENJDK_BUILD_JVM_CFLAGS="$OPENJDK_BUILD_JVM_CFLAGS -mcpu=power7 -mtune=power8"
 fi
+elif test "x$OPENJDK_BUILD_CPU" = xs390x; then
+if test "x$OPENJDK_BUILD_OS" = xlinux; then
+OPENJDK_BUILD_JVM_CFLAGS="$OPENJDK_BUILD_JVM_CFLAGS -mbackchain -march=z10"
+fi
 fi
 if test "x$OPENJDK_BUILD_CPU_ENDIAN" = xlittle; then
@@ -51085,6 +51102,7 @@ $as_echo "$as_me: GCC >= 6 detected; adding ${NO_DELETE_NULL_POINTER_CHECKS_CFLA
 # Setup some hard coded includes
 OPENJDK_BUILD_COMMON_CCXXFLAGS_JDK="$OPENJDK_BUILD_COMMON_CCXXFLAGS_JDK \
+-I\$(SUPPORT_OUTPUTDIR)/modules_include/java.base \
 -I${JDK_TOPDIR}/src/java.base/share/native/include \
 -I${JDK_TOPDIR}/src/java.base/$OPENJDK_BUILD_OS/native/include \
 -I${JDK_TOPDIR}/src/java.base/$OPENJDK_BUILD_OS_TYPE/native/include \

View File

@@ -265,6 +265,10 @@ IMAGES_OUTPUTDIR=$(BUILD_OUTPUT)/images
 BUNDLES_OUTPUTDIR=$(BUILD_OUTPUT)/bundles
 TESTMAKE_OUTPUTDIR=$(BUILD_OUTPUT)/test-make
 MAKESUPPORT_OUTPUTDIR=$(BUILD_OUTPUT)/make-support
+# By default, output javadoc directly into image
+JAVADOC_OUTPUTDIR = $(DOCS_IMAGE_DIR)
 # This does not get overridden in a bootcycle build
 CONFIGURESUPPORT_OUTPUTDIR:=@CONFIGURESUPPORT_OUTPUTDIR@
 BUILDJDK_OUTPUTDIR=$(BUILD_OUTPUT)/buildjdk
@@ -788,7 +792,7 @@ INTERIM_IMAGE_DIR := $(SUPPORT_OUTPUTDIR)/interim-image
 # Docs image
 DOCS_IMAGE_SUBDIR := docs
-DOCS_IMAGE_DIR := $(IMAGES_OUTPUTDIR)/$(DOCS_IMAGE_SUBDIR)
+DOCS_IMAGE_DIR = $(IMAGES_OUTPUTDIR)/$(DOCS_IMAGE_SUBDIR)
 # Macosx bundles directory definitions
 JDK_MACOSX_BUNDLE_SUBDIR=jdk-bundle

View File

@@ -544,3 +544,4 @@ fc0956308c7a586267c5dd35dff74f773aa9c3eb jdk-9+138
 08492e67bf3226784dab3bf9ae967382ddbc1af5 jdk-9+139
 fec31089c2ef5a12dd64f401b0bf2e00f56ee0d0 jdk-9+140
 160a00bc6ed0af1fdf8418fc65e6bddbbc0c536d jdk-9+141
+7b48d63dfd6b8e2657288de3d7b1f153dee02d7e jdk-9+142

View File

@@ -135,14 +135,14 @@ TARGETS += $(JVMTI_OUTPUTDIR)/jvmtiEnvRecommended.cpp
 # Copy jvmti.h to include dir
 # The file is the same regardless of jvm variant. Only let one do the copy.
-#ifeq ($(JVM_VARIANT), $(firstword $(JVM_VARIANTS)))
-# $(eval $(call SetupCopyFiles, COPY_JVMTI_H, \
-# DEST := $(SUPPORT_OUTPUTDIR)/modules_include/java.base, \
-# FILES := $(JVMTI_OUTPUTDIR)/jvmti.h, \
-# ))
-# TARGETS += $(COPY_JVMTI_H)
-#endif
+ifeq ($(JVM_VARIANT), $(firstword $(JVM_VARIANTS)))
+$(eval $(call SetupCopyFiles, COPY_JVMTI_H, \
+DEST := $(SUPPORT_OUTPUTDIR)/modules_include/java.base, \
+FILES := $(JVMTI_OUTPUTDIR)/jvmti.h, \
+))
+TARGETS += $(COPY_JVMTI_H)
+endif
 ################################################################################
 # Create trace files in gensrc/tracefiles

View File

@@ -176,6 +176,11 @@ endif
 JVM_OPTIMIZATION ?= HIGHEST_JVM
+# Need to set JVM_STRIPFLAGS to the default value from SPEC since the STRIPFLAGS
+# parameter to SetupNativeCompilation allows an empty value to override the
+# default.
+JVM_STRIPFLAGS ?= $(STRIPFLAGS)
 ################################################################################
 # Now set up the actual compilation of the main hotspot native library
@@ -204,6 +209,7 @@ $(eval $(call SetupNativeCompilation, BUILD_LIBJVM, \
 OBJECT_DIR := $(JVM_OUTPUTDIR)/objs, \
 MAPFILE := $(JVM_MAPFILE), \
 USE_MAPFILE_FOR_SYMBOLS := true, \
+STRIPFLAGS := $(JVM_STRIPFLAGS), \
 EMBED_MANIFEST := true, \
 RC_FLAGS := $(JVM_RCFLAGS), \
 VERSIONINFO_RESOURCE := $(HOTSPOT_TOPDIR)/src/os/windows/vm/version.rc, \

View File

@@ -59,6 +59,10 @@ endif
 ifeq ($(call check-jvm-feature, minimal), true)
 JVM_CFLAGS_FEATURES += -DMINIMAL_JVM -DVMTYPE=\"Minimal\"
+ifeq ($(OPENJDK_TARGET_OS), linux)
+# Override the default -g with a more liberal strip policy for the minimal JVM
+JVM_STRIPFLAGS := --strip-unneeded
+endif
 endif
 ifeq ($(call check-jvm-feature, dtrace), true)

View File

@@ -45,6 +45,7 @@ BUILD_HOTSPOT_JTREG_NATIVE_SRC := \
 $(HOTSPOT_TOPDIR)/test/runtime/jni/8025979 \
 $(HOTSPOT_TOPDIR)/test/runtime/jni/8033445 \
 $(HOTSPOT_TOPDIR)/test/runtime/jni/checked \
+$(HOTSPOT_TOPDIR)/test/runtime/jni/PrivateInterfaceMethods \
 $(HOTSPOT_TOPDIR)/test/runtime/jni/ToStringInInterfaceTest \
 $(HOTSPOT_TOPDIR)/test/runtime/modules/getModuleJNI \
 $(HOTSPOT_TOPDIR)/test/runtime/SameObject \

View File

@@ -3496,6 +3496,16 @@ bool Matcher::narrow_klass_use_complex_address() {
 return false;
 }
+bool Matcher::const_oop_prefer_decode() {
+// Prefer ConN+DecodeN over ConP in simple compressed oops mode.
+return Universe::narrow_oop_base() == NULL;
+}
+bool Matcher::const_klass_prefer_decode() {
+// Prefer ConNKlass+DecodeNKlass over ConP in simple compressed klass mode.
+return Universe::narrow_klass_base() == NULL;
+}
 // Is it better to copy float constants, or load them directly from
 // memory? Intel can load a float constant from a direct address,
 // requiring no extra registers. Most RISCs will have to materialize
@@ -15502,6 +15512,24 @@ instruct string_indexof_conLU(iRegP_R1 str1, iRegI_R4 cnt1, iRegP_R3 str2,
 ins_pipe(pipe_class_memory);
 %}
+instruct string_indexofU_char(iRegP_R1 str1, iRegI_R2 cnt1, iRegI_R3 ch,
+iRegI_R0 result, iRegI tmp1, iRegI tmp2,
+iRegI tmp3, rFlagsReg cr)
+%{
+match(Set result (StrIndexOfChar (Binary str1 cnt1) ch));
+effect(USE_KILL str1, USE_KILL cnt1, USE_KILL ch,
+TEMP tmp1, TEMP tmp2, TEMP tmp3, KILL cr);
+format %{ "String IndexOf char[] $str1,$cnt1,$ch -> $result" %}
+ins_encode %{
+__ string_indexof_char($str1$$Register, $cnt1$$Register, $ch$$Register,
+$result$$Register, $tmp1$$Register, $tmp2$$Register,
+$tmp3$$Register);
+%}
+ins_pipe(pipe_class_memory);
+%}
 instruct string_equalsL(iRegP_R1 str1, iRegP_R3 str2, iRegI_R4 cnt,
 iRegI_R0 result, rFlagsReg cr)
 %{
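The new const_oop_prefer_decode()/const_klass_prefer_decode() hooks tell the matcher whether loading a narrow constant and decoding it (ConN+DecodeN) beats loading the full pointer (ConP). The deciding factor is the heap base: with a NULL base the decode collapses to a shift. A rough sketch of the decode being weighed (illustrative C++ only, not the HotSpot implementation; names are made up):

    #include <cstdint>

    using narrowOop = uint32_t;

    // Zero-based compressed oops: base == 0, so decoding is just a shift.
    // With a non-zero base an extra add (and a null check) is needed, which
    // is why the aarch64/ppc variants only prefer ConN+DecodeN when the base
    // is NULL.
    inline void* decode_oop(narrowOop v, uintptr_t heap_base, unsigned shift) {
      if (v == 0) return nullptr;   // a narrow null stays null
      return reinterpret_cast<void*>(heap_base + (static_cast<uintptr_t>(v) << shift));
    }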

View File

@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
+* Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2014, Red Hat Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
@@ -130,8 +130,8 @@ void InterpreterRuntime::SignatureHandlerGenerator::pass_float() {
 if (_num_fp_args < Argument::n_float_register_parameters_c) {
 __ ldrs(as_FloatRegister(_num_fp_args++), src);
 } else {
-__ ldrh(r0, src);
-__ strh(r0, Address(to(), _stack_offset));
+__ ldrw(r0, src);
+__ strw(r0, Address(to(), _stack_offset));
 _stack_offset += wordSize;
 _num_fp_args++;
 }
@@ -349,7 +349,7 @@ class SlowSignatureHandler
 _num_fp_args++;
 } else {
 *_to++ = from_obj;
-_num_int_args++;
+_num_fp_args++;
 }
 }
@@ -364,7 +364,7 @@ class SlowSignatureHandler
 _num_fp_args++;
 } else {
 *_to++ = from_obj;
-_num_int_args++;
+_num_fp_args++;
 }
 }
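The pass_float fix replaces halfword loads/stores (ldrh/strh) with word-sized ones (ldrw/strw): a jfloat is four bytes, so a 16-bit copy silently truncates the value whenever the argument spills to the stack. The same point in portable form (an illustrative C++ sketch, not JDK code):

    #include <cstring>
    #include <cstdint>

    // Copy one float argument from an interpreter slot to the native stack.
    // A 32-bit container mirrors ldrw/strw; a 16-bit container would
    // reproduce the old ldrh/strh bug and drop the upper half of the bits.
    void copy_float_arg(const void* src, void* dst) {
      uint32_t bits;                          // exactly sizeof(jfloat)
      std::memcpy(&bits, src, sizeof bits);   // like ldrw
      std::memcpy(dst, &bits, sizeof bits);   // like strw
    }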

View File

@@ -60,12 +60,12 @@ void CodeInstaller::pd_patch_OopConstant(int pc_offset, Handle constant, TRAPS)
 void CodeInstaller::pd_patch_MetaspaceConstant(int pc_offset, Handle constant, TRAPS) {
 address pc = _instructions->start() + pc_offset;
 if (HotSpotMetaspaceConstantImpl::compressed(constant)) {
-narrowKlass narrowOop = record_narrow_metadata_reference(constant, CHECK);
+narrowKlass narrowOop = record_narrow_metadata_reference(_instructions, pc, constant, CHECK);
 TRACE_jvmci_3("relocating (narrow metaspace constant) at " PTR_FORMAT "/0x%x", p2i(pc), narrowOop);
 Unimplemented();
 } else {
 NativeMovConstReg* move = nativeMovConstReg_at(pc);
-void* reference = record_metadata_reference(constant, CHECK);
+void* reference = record_metadata_reference(_instructions, pc, constant, CHECK);
 move->set_data((intptr_t) reference);
 TRACE_jvmci_3("relocating (metaspace constant) at " PTR_FORMAT "/" PTR_FORMAT, p2i(pc), p2i(reference));
 }

View File

@@ -4508,6 +4508,67 @@ void MacroAssembler::string_indexof(Register str2, Register str1,
 typedef void (MacroAssembler::* chr_insn)(Register Rt, const Address &adr);
 typedef void (MacroAssembler::* uxt_insn)(Register Rd, Register Rn);
+void MacroAssembler::string_indexof_char(Register str1, Register cnt1,
+Register ch, Register result,
+Register tmp1, Register tmp2, Register tmp3)
+{
+Label CH1_LOOP, HAS_ZERO, DO1_SHORT, DO1_LOOP, MATCH, NOMATCH, DONE;
+Register cnt1_neg = cnt1;
+Register ch1 = rscratch1;
+Register result_tmp = rscratch2;
+cmp(cnt1, 4);
+br(LT, DO1_SHORT);
+orr(ch, ch, ch, LSL, 16);
+orr(ch, ch, ch, LSL, 32);
+sub(cnt1, cnt1, 4);
+mov(result_tmp, cnt1);
+lea(str1, Address(str1, cnt1, Address::uxtw(1)));
+sub(cnt1_neg, zr, cnt1, LSL, 1);
+mov(tmp3, 0x0001000100010001);
+BIND(CH1_LOOP);
+ldr(ch1, Address(str1, cnt1_neg));
+eor(ch1, ch, ch1);
+sub(tmp1, ch1, tmp3);
+orr(tmp2, ch1, 0x7fff7fff7fff7fff);
+bics(tmp1, tmp1, tmp2);
+br(NE, HAS_ZERO);
+adds(cnt1_neg, cnt1_neg, 8);
+br(LT, CH1_LOOP);
+cmp(cnt1_neg, 8);
+mov(cnt1_neg, 0);
+br(LT, CH1_LOOP);
+b(NOMATCH);
+BIND(HAS_ZERO);
+rev(tmp1, tmp1);
+clz(tmp1, tmp1);
+add(cnt1_neg, cnt1_neg, tmp1, LSR, 3);
+b(MATCH);
+BIND(DO1_SHORT);
+mov(result_tmp, cnt1);
+lea(str1, Address(str1, cnt1, Address::uxtw(1)));
+sub(cnt1_neg, zr, cnt1, LSL, 1);
+BIND(DO1_LOOP);
+ldrh(ch1, Address(str1, cnt1_neg));
+cmpw(ch, ch1);
+br(EQ, MATCH);
+adds(cnt1_neg, cnt1_neg, 2);
+br(LT, DO1_LOOP);
+BIND(NOMATCH);
+mov(result, -1);
+b(DONE);
+BIND(MATCH);
+add(result, result_tmp, cnt1_neg, ASR, 1);
+BIND(DONE);
+}
 // Compare strings.
 void MacroAssembler::string_compare(Register str1, Register str2,
 Register cnt1, Register cnt2, Register result,
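string_indexof_char scans four UTF-16 chars per 64-bit load. The heart of the fast loop is a zero-lane test: XOR the packed chars with the broadcast search char, then apply the classic (x - ones) & ~x & highs trick to see whether any 16-bit lane became zero; that is what the sub/orr/bics sequence computes. A scalar sketch of the same test (illustrative C++, not JDK code):

    #include <cstdint>

    // True if any of the four 16-bit lanes in 'four_chars' equals 'ch'.
    inline bool word_contains_char(uint64_t four_chars, uint16_t ch) {
      const uint64_t lanes = 0x0001000100010001ULL;   // one per 16-bit lane
      const uint64_t highs = 0x8000800080008000ULL;   // lane sign bits
      uint64_t pattern = lanes * ch;                  // broadcast ch to 4 lanes
      uint64_t x = four_chars ^ pattern;              // a lane is zero where it matched
      return ((x - lanes) & ~x & highs) != 0;         // any zero lane?
    }

The rev/clz pair in the stub then turns the flagged lane back into a character index; the short path (fewer than four chars) simply compares one halfword at a time.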

View File

@@ -1229,6 +1229,9 @@
 Register tmp1, Register tmp2,
 Register tmp3, Register tmp4,
 int int_cnt1, Register result, int ae);
+void string_indexof_char(Register str1, Register cnt1,
+Register ch, Register result,
+Register tmp1, Register tmp2, Register tmp3);
 private:
 void add2_with_carry(Register final_dest_hi, Register dest_hi, Register dest_lo,
 Register src1, Register src2);

View File

@@ -989,7 +989,16 @@ static void object_move(MacroAssembler* masm,
 // A float arg may have to do float reg int reg conversion
 static void float_move(MacroAssembler* masm, VMRegPair src, VMRegPair dst) {
-if (src.first() != dst.first()) {
+assert(src.first()->is_stack() && dst.first()->is_stack() ||
+src.first()->is_reg() && dst.first()->is_reg(), "Unexpected error");
+if (src.first()->is_stack()) {
+if (dst.first()->is_stack()) {
+__ ldrw(rscratch1, Address(rfp, reg2offset_in(src.first())));
+__ strw(rscratch1, Address(sp, reg2offset_out(dst.first())));
+} else {
+ShouldNotReachHere();
+}
+} else if (src.first() != dst.first()) {
 if (src.is_single_phys_reg() && dst.is_single_phys_reg())
 __ fmovs(dst.first()->as_FloatRegister(), src.first()->as_FloatRegister());
 else
@@ -1023,7 +1032,16 @@ static void long_move(MacroAssembler* masm, VMRegPair src, VMRegPair dst) {
 // A double move
 static void double_move(MacroAssembler* masm, VMRegPair src, VMRegPair dst) {
-if (src.first() != dst.first()) {
+assert(src.first()->is_stack() && dst.first()->is_stack() ||
+src.first()->is_reg() && dst.first()->is_reg(), "Unexpected error");
+if (src.first()->is_stack()) {
+if (dst.first()->is_stack()) {
+__ ldr(rscratch1, Address(rfp, reg2offset_in(src.first())));
+__ str(rscratch1, Address(sp, reg2offset_out(dst.first())));
+} else {
+ShouldNotReachHere();
+}
+} else if (src.first() != dst.first()) {
 if (src.is_single_phys_reg() && dst.is_single_phys_reg())
 __ fmovd(dst.first()->as_FloatRegister(), src.first()->as_FloatRegister());
 else

View File

@@ -2743,7 +2743,7 @@ class StubGenerator: public StubCodeGenerator {
 __ align(CodeEntryAlignment);
 StubCodeMark mark(this, "StubRoutines", "cipherBlockChaining_encryptAESCrypt");
-Label L_loadkeys_44, L_loadkeys_52, L_aes_loop, L_rounds_44, L_rounds_52;
+Label L_loadkeys_44, L_loadkeys_52, L_aes_loop, L_rounds_44, L_rounds_52, _L_finish;
 const Register from = c_rarg0; // source array address
 const Register to = c_rarg1; // destination array address
@@ -2754,9 +2754,12 @@ class StubGenerator: public StubCodeGenerator {
 const Register keylen = rscratch1;
 address start = __ pc();
 __ enter();
-__ mov(rscratch2, len_reg);
+__ subsw(rscratch2, len_reg, zr);
+__ br(Assembler::LE, _L_finish);
 __ ldrw(keylen, Address(key, arrayOopDesc::length_offset_in_bytes() - arrayOopDesc::base_offset_in_bytes(T_INT)));
 __ ld1(v0, __ T16B, rvec);
@@ -2814,11 +2817,13 @@ class StubGenerator: public StubCodeGenerator {
 __ eor(v0, __ T16B, v0, v31);
 __ st1(v0, __ T16B, __ post(to, 16));
-__ sub(len_reg, len_reg, 16);
-__ cbnz(len_reg, L_aes_loop);
+__ subw(len_reg, len_reg, 16);
+__ cbnzw(len_reg, L_aes_loop);
 __ st1(v0, __ T16B, rvec);
+__ BIND(_L_finish);
 __ mov(r0, rscratch2);
 __ leave();
@@ -2844,7 +2849,7 @@ class StubGenerator: public StubCodeGenerator {
 __ align(CodeEntryAlignment);
 StubCodeMark mark(this, "StubRoutines", "cipherBlockChaining_decryptAESCrypt");
-Label L_loadkeys_44, L_loadkeys_52, L_aes_loop, L_rounds_44, L_rounds_52;
+Label L_loadkeys_44, L_loadkeys_52, L_aes_loop, L_rounds_44, L_rounds_52, _L_finish;
 const Register from = c_rarg0; // source array address
 const Register to = c_rarg1; // destination array address
@@ -2855,9 +2860,12 @@ class StubGenerator: public StubCodeGenerator {
 const Register keylen = rscratch1;
 address start = __ pc();
 __ enter();
-__ mov(rscratch2, len_reg);
+__ subsw(rscratch2, len_reg, zr);
+__ br(Assembler::LE, _L_finish);
 __ ldrw(keylen, Address(key, arrayOopDesc::length_offset_in_bytes() - arrayOopDesc::base_offset_in_bytes(T_INT)));
 __ ld1(v2, __ T16B, rvec);
@@ -2920,11 +2928,12 @@ class StubGenerator: public StubCodeGenerator {
 __ st1(v0, __ T16B, __ post(to, 16));
 __ orr(v2, __ T16B, v1, v1);
-__ sub(len_reg, len_reg, 16);
-__ cbnz(len_reg, L_aes_loop);
+__ subw(len_reg, len_reg, 16);
+__ cbnzw(len_reg, L_aes_loop);
 __ st1(v2, __ T16B, rvec);
+__ BIND(_L_finish);
 __ mov(r0, rscratch2);
 __ leave();
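Both CBC stubs now test the length up front (subsw plus a branch to _L_finish) and do the loop arithmetic in 32-bit registers (subw/cbnzw). Without the guard, a decrement-then-test loop misbehaves for a zero-length input. The shape of the fix in plain terms (an illustrative C++ sketch, not the stub itself):

    #include <cstdint>

    // Process 16-byte blocks; 'len' is an int, as in the Java-level API.
    void cbc_blocks(const uint8_t* in, uint8_t* out, int len) {
      if (len <= 0) return;          // early exit, mirrors the new _L_finish path
      do {
        // ... transform one 16-byte block from 'in' to 'out' ...
        in += 16;
        out += 16;
        len -= 16;                   // 32-bit arithmetic, like subw
      } while (len != 0);            // like cbnzw
    }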

View File

@@ -454,8 +454,9 @@ address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state,
 __ str(zr, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
 #if INCLUDE_JVMCI
-// Check if we need to take lock at entry of synchronized method.
-if (UseJVMCICompiler) {
+// Check if we need to take lock at entry of synchronized method. This can
+// only occur on method entry so emit it only for vtos with step 0.
+if (UseJVMCICompiler && state == vtos && step == 0) {
 Label L;
 __ ldr(rscratch1, Address(rthread, Thread::pending_exception_offset()));
 __ cbz(rscratch1, L);
@@ -464,8 +465,17 @@ address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state,
 // Take lock.
 lock_method();
 __ bind(L);
-}
+} else {
+#ifdef ASSERT
+if (UseJVMCICompiler) {
+Label L;
+__ ldr(rscratch1, Address(rthread, Thread::pending_exception_offset()));
+__ cbz(rscratch1, L);
+__ stop("unexpected pending monitor in deopt entry");
+__ bind(L);
+}
 #endif
+}
 // handle exceptions
 {
 Label L;

View File

@@ -3717,19 +3717,15 @@ void TemplateTable::monitorenter()
 // allocate one if there's no free slot
 {
-Label entry, loop, no_adjust;
+Label entry, loop;
 // 1. compute new pointers // rsp: old expression stack top
 __ ldr(c_rarg1, monitor_block_bot); // c_rarg1: old expression stack bottom
 __ sub(esp, esp, entry_size); // move expression stack top
 __ sub(c_rarg1, c_rarg1, entry_size); // move expression stack bottom
 __ mov(c_rarg3, esp); // set start value for copy loop
 __ str(c_rarg1, monitor_block_bot); // set new monitor block bottom
-__ cmp(sp, c_rarg3); // Check if we need to move sp
-__ br(Assembler::LO, no_adjust); // to allow more stack space
-// for our new esp
-__ sub(sp, sp, 2 * wordSize);
-__ bind(no_adjust);
+__ sub(sp, sp, entry_size); // make room for the monitor
 __ b(entry);
 // 2. move expression stack contents

View File

@@ -64,17 +64,16 @@ void C1_MacroAssembler::explicit_null_check(Register base) {
 void C1_MacroAssembler::build_frame(int frame_size_in_bytes, int bang_size_in_bytes) {
-assert(bang_size_in_bytes >= frame_size_in_bytes, "stack bang size incorrect");
+// Avoid stack bang as first instruction. It may get overwritten by patch_verified_entry.
+const Register return_pc = R20;
+mflr(return_pc);
 // Make sure there is enough stack space for this method's activation.
+assert(bang_size_in_bytes >= frame_size_in_bytes, "stack bang size incorrect");
 generate_stack_overflow_check(bang_size_in_bytes);
-// Create the frame.
-const Register return_pc = R0;
-mflr(return_pc);
-// Get callers sp.
-std(return_pc, _abi(lr), R1_SP); // SP->lr = return_pc
-push_frame(frame_size_in_bytes, R0); // SP -= frame_size_in_bytes
+std(return_pc, _abi(lr), R1_SP); // SP->lr = return_pc
+push_frame(frame_size_in_bytes, R0); // SP -= frame_size_in_bytes
 }

View File

@@ -1097,21 +1097,19 @@ EmitCallOffsets emit_call_with_trampoline_stub(MacroAssembler &_masm, address en
 // No entry point given, use the current pc.
 if (entry_point == NULL) entry_point = __ pc();
-if (!Compile::current()->in_scratch_emit_size()) {
-// Put the entry point as a constant into the constant pool.
-const address entry_point_toc_addr = __ address_constant(entry_point, RelocationHolder::none);
-if (entry_point_toc_addr == NULL) {
-ciEnv::current()->record_out_of_memory_failure();
-return offsets;
-}
-const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
-// Emit the trampoline stub which will be related to the branch-and-link below.
-CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
-if (ciEnv::current()->failing()) { return offsets; } // Code cache may be full.
-__ relocate(rtype);
+// Put the entry point as a constant into the constant pool.
+const address entry_point_toc_addr = __ address_constant(entry_point, RelocationHolder::none);
+if (entry_point_toc_addr == NULL) {
+ciEnv::current()->record_out_of_memory_failure();
+return offsets;
 }
+const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
+// Emit the trampoline stub which will be related to the branch-and-link below.
+CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
+if (ciEnv::current()->failing()) { return offsets; } // Code cache may be full.
+__ relocate(rtype);
 // Note: At this point we do not have the address of the trampoline
 // stub, and the entry point might be too far away for bl, so __ pc()
 // serves as dummy and the bl will be patched later.
@@ -2166,6 +2164,16 @@ bool Matcher::narrow_klass_use_complex_address() {
 return false;
 }
+bool Matcher::const_oop_prefer_decode() {
+// Prefer ConN+DecodeN over ConP in simple compressed oops mode.
+return Universe::narrow_oop_base() == NULL;
+}
+bool Matcher::const_klass_prefer_decode() {
+// Prefer ConNKlass+DecodeNKlass over ConP in simple compressed klass mode.
+return Universe::narrow_klass_base() == NULL;
+}
 // Is it better to copy float constants, or load them directly from memory?
 // Intel can load a float constant from a direct address, requiring no
 // extra registers. Most RISCs will have to materialize an address into a
@@ -2424,23 +2432,21 @@ encode %{
 MacroAssembler _masm(&cbuf);
 int toc_offset = 0;
-if (!ra_->C->in_scratch_emit_size()) {
-address const_toc_addr;
-// Create a non-oop constant, no relocation needed.
-// If it is an IC, it has a virtual_call_Relocation.
-const_toc_addr = __ long_constant((jlong)$src$$constant);
-if (const_toc_addr == NULL) {
-ciEnv::current()->record_out_of_memory_failure();
-return;
-}
-// Get the constant's TOC offset.
-toc_offset = __ offset_to_method_toc(const_toc_addr);
-// Keep the current instruction offset in mind.
-((loadConLNode*)this)->_cbuf_insts_offset = __ offset();
+address const_toc_addr;
+// Create a non-oop constant, no relocation needed.
+// If it is an IC, it has a virtual_call_Relocation.
+const_toc_addr = __ long_constant((jlong)$src$$constant);
+if (const_toc_addr == NULL) {
+ciEnv::current()->record_out_of_memory_failure();
+return;
 }
+// Get the constant's TOC offset.
+toc_offset = __ offset_to_method_toc(const_toc_addr);
+// Keep the current instruction offset in mind.
+((loadConLNode*)this)->_cbuf_insts_offset = __ offset();
 __ ld($dst$$Register, toc_offset, $toc$$Register);
 %}
@@ -2576,32 +2582,30 @@ encode %{
 MacroAssembler _masm(&cbuf);
 int toc_offset = 0;
-if (!ra_->C->in_scratch_emit_size()) {
-intptr_t val = $src$$constant;
-relocInfo::relocType constant_reloc = $src->constant_reloc(); // src
-address const_toc_addr;
-if (constant_reloc == relocInfo::oop_type) {
-// Create an oop constant and a corresponding relocation.
-AddressLiteral a = __ allocate_oop_address((jobject)val);
-const_toc_addr = __ address_constant((address)a.value(), RelocationHolder::none);
-__ relocate(a.rspec());
-} else if (constant_reloc == relocInfo::metadata_type) {
-AddressLiteral a = __ constant_metadata_address((Metadata *)val);
-const_toc_addr = __ address_constant((address)a.value(), RelocationHolder::none);
-__ relocate(a.rspec());
-} else {
-// Create a non-oop constant, no relocation needed.
-const_toc_addr = __ long_constant((jlong)$src$$constant);
-}
-if (const_toc_addr == NULL) {
-ciEnv::current()->record_out_of_memory_failure();
-return;
-}
-// Get the constant's TOC offset.
-toc_offset = __ offset_to_method_toc(const_toc_addr);
+intptr_t val = $src$$constant;
+relocInfo::relocType constant_reloc = $src->constant_reloc(); // src
+address const_toc_addr;
+if (constant_reloc == relocInfo::oop_type) {
+// Create an oop constant and a corresponding relocation.
+AddressLiteral a = __ allocate_oop_address((jobject)val);
+const_toc_addr = __ address_constant((address)a.value(), RelocationHolder::none);
+__ relocate(a.rspec());
+} else if (constant_reloc == relocInfo::metadata_type) {
+AddressLiteral a = __ constant_metadata_address((Metadata *)val);
+const_toc_addr = __ address_constant((address)a.value(), RelocationHolder::none);
+__ relocate(a.rspec());
+} else {
+// Create a non-oop constant, no relocation needed.
+const_toc_addr = __ long_constant((jlong)$src$$constant);
 }
+if (const_toc_addr == NULL) {
+ciEnv::current()->record_out_of_memory_failure();
+return;
+}
+// Get the constant's TOC offset.
+toc_offset = __ offset_to_method_toc(const_toc_addr);
 __ ld($dst$$Register, toc_offset, $toc$$Register);
 %}
@@ -3272,28 +3276,26 @@ encode %{
 } else {
 // Remember the offset not the address.
 const int start_offset = __ offset();
 // The trampoline stub.
-if (!Compile::current()->in_scratch_emit_size()) {
-// No entry point given, use the current pc.
-// Make sure branch fits into
-if (entry_point == 0) entry_point = __ pc();
+// No entry point given, use the current pc.
+// Make sure branch fits into
+if (entry_point == 0) entry_point = __ pc();
 // Put the entry point as a constant into the constant pool.
 const address entry_point_toc_addr = __ address_constant(entry_point, RelocationHolder::none);
 if (entry_point_toc_addr == NULL) {
 ciEnv::current()->record_out_of_memory_failure();
 return;
-}
-const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
-// Emit the trampoline stub which will be related to the branch-and-link below.
-CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
-if (ciEnv::current()->failing()) { return; } // Code cache may be full.
-int method_index = resolved_method_index(cbuf);
-__ relocate(_optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
-: static_call_Relocation::spec(method_index));
 }
+const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
+// Emit the trampoline stub which will be related to the branch-and-link below.
+CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
+if (ciEnv::current()->failing()) { return; } // Code cache may be full.
+int method_index = resolved_method_index(cbuf);
+__ relocate(_optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
+: static_call_Relocation::spec(method_index));
 // The real call.
 // Note: At this point we do not have the address of the trampoline

View File

@@ -2550,7 +2550,7 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteContr
 __ lbzx(R17_tos, Rclass_or_obj, Roffset);
 __ extsb(R17_tos, R17_tos);
 __ push(ztos);
-if (!is_static) {
+if (!is_static && rc == may_rewrite) {
 // use btos rewriting, no truncating to t/f bit is needed for getfield.
 patch_bytecode(Bytecodes::_fast_bgetfield, Rbc, Rscratch);
 }
@@ -2874,7 +2874,9 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteContr
 if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
 __ andi(R17_tos, R17_tos, 0x1);
 __ stbx(R17_tos, Rclass_or_obj, Roffset);
-if (!is_static) { patch_bytecode(Bytecodes::_fast_zputfield, Rbc, Rscratch, true, byte_no); }
+if (!is_static && rc == may_rewrite) {
+patch_bytecode(Bytecodes::_fast_zputfield, Rbc, Rscratch, true, byte_no);
+}
 if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
 __ beq(CR_is_vol, Lvolatile); // Volatile?
 }

View File

@@ -71,7 +71,7 @@ void CodeInstaller::pd_patch_MetaspaceConstant(int pc_offset, Handle constant, T
 if (HotSpotMetaspaceConstantImpl::compressed(constant)) {
 #ifdef _LP64
 NativeMovConstReg32* move = nativeMovConstReg32_at(pc);
-narrowKlass narrowOop = record_narrow_metadata_reference(constant, CHECK);
+narrowKlass narrowOop = record_narrow_metadata_reference(_instructions, pc, constant, CHECK);
 move->set_data((intptr_t)narrowOop);
 TRACE_jvmci_3("relocating (narrow metaspace constant) at " PTR_FORMAT "/0x%x", p2i(pc), narrowOop);
 #else
@@ -79,7 +79,7 @@ void CodeInstaller::pd_patch_MetaspaceConstant(int pc_offset, Handle constant, T
 #endif
 } else {
 NativeMovConstReg* move = nativeMovConstReg_at(pc);
-void* reference = record_metadata_reference(constant, CHECK);
+void* reference = record_metadata_reference(_instructions, pc, constant, CHECK);
 move->set_data((intptr_t)reference);
 TRACE_jvmci_3("relocating (metaspace constant) at " PTR_FORMAT "/" PTR_FORMAT, p2i(pc), p2i(reference));
 }

View File

@@ -2003,6 +2003,20 @@ bool Matcher::narrow_klass_use_complex_address() {
 return false;
 }
+bool Matcher::const_oop_prefer_decode() {
+// TODO: Check if loading ConP from TOC in heap-based mode is better:
+// Prefer ConN+DecodeN over ConP in simple compressed oops mode.
+// return Universe::narrow_oop_base() == NULL;
+return true;
+}
+bool Matcher::const_klass_prefer_decode() {
+// TODO: Check if loading ConP from TOC in heap-based mode is better:
+// Prefer ConNKlass+DecodeNKlass over ConP in simple compressed klass mode.
+// return Universe::narrow_klass_base() == NULL;
+return true;
+}
 // Is it better to copy float constants, or load them directly from memory?
 // Intel can load a float constant from a direct address, requiring no
 // extra registers. Most RISCs will have to materialize an address into a

View File

@@ -384,8 +384,9 @@ address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, i
 address entry = __ pc();
 __ get_constant_pool_cache(LcpoolCache); // load LcpoolCache
 #if INCLUDE_JVMCI
-// Check if we need to take lock at entry of synchronized method.
-if (UseJVMCICompiler) {
+// Check if we need to take lock at entry of synchronized method. This can
+// only occur on method entry so emit it only for vtos with step 0.
+if (UseJVMCICompiler && state == vtos && step == 0) {
 Label L;
 Address pending_monitor_enter_addr(G2_thread, JavaThread::pending_monitorenter_offset());
 __ ldbool(pending_monitor_enter_addr, Gtemp); // Load if pending monitor enter
@@ -395,6 +396,17 @@ address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, i
 // Take lock.
 lock_method();
 __ bind(L);
+} else {
+#ifdef ASSERT
+if (UseJVMCICompiler) {
+Label L;
+Address pending_monitor_enter_addr(G2_thread, JavaThread::pending_monitorenter_offset());
+__ ldbool(pending_monitor_enter_addr, Gtemp); // Load if pending monitor enter
+__ cmp_and_br_short(Gtemp, G0, Assembler::equal, Assembler::pn, L);
+__ stop("unexpected pending monitor in deopt entry");
+__ bind(L);
+}
+#endif
 }
 #endif
 { Label L;

View File

@@ -84,7 +84,6 @@
 declare_constant(VM_Version::sun4v_m) \
 declare_constant(VM_Version::blk_init_instructions_m) \
 declare_constant(VM_Version::fmaf_instructions_m) \
-declare_constant(VM_Version::fmau_instructions_m) \
 declare_constant(VM_Version::sparc64_family_m) \
 declare_constant(VM_Version::M_family_m) \
 declare_constant(VM_Version::T_family_m) \

View File

@@ -179,7 +179,7 @@ void VM_Version::initialize() {
 assert((OptoLoopAlignment % relocInfo::addr_unit()) == 0, "alignment is not a multiple of NOP size");
 char buf[512];
-jio_snprintf(buf, sizeof(buf), "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s",
+jio_snprintf(buf, sizeof(buf), "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s",
 (has_v9() ? ", v9" : (has_v8() ? ", v8" : "")),
 (has_hardware_popc() ? ", popc" : ""),
 (has_vis1() ? ", vis1" : ""),
@@ -193,6 +193,7 @@ void VM_Version::initialize() {
 (has_sha512() ? ", sha512" : ""),
 (has_crc32c() ? ", crc32c" : ""),
 (is_ultra3() ? ", ultra3" : ""),
+(has_sparc5_instr() ? ", sparc5" : ""),
 (is_sun4v() ? ", sun4v" : ""),
 (is_niagara_plus() ? ", niagara_plus" : (is_niagara() ? ", niagara" : "")),
 (is_sparc64() ? ", sparc64" : ""),
@@ -487,16 +488,11 @@ int VM_Version::parse_features(const char* implementation) {
 if (strstr(impl, "SPARC-T1") != NULL) {
 features |= T1_model_m;
 }
+} else if (strstr(impl, "SUN4V-CPU") != NULL) {
+// Generic or migration class LDOM
+features |= T_family_m;
 } else {
-if (strstr(impl, "SPARC") == NULL) {
-#ifndef PRODUCT
-// kstat on Solaris 8 virtual machines (branded zones)
-// returns "(unsupported)" implementation. Solaris 8 is not
-// supported anymore, but include this check to be on the
-// safe side.
-warning("Can't parse CPU implementation = '%s', assume generic SPARC", impl);
-#endif
-}
+log_info(os, cpu)("Failed to parse CPU implementation = '%s'", impl);
 }
 os::free((void*)impl);
 return features;

View File

@@ -34,30 +34,29 @@ class VM_Version: public Abstract_VM_Version {
 protected:
 enum Feature_Flag {
 v8_instructions = 0,
 hardware_mul32 = 1,
 hardware_div32 = 2,
 hardware_fsmuld = 3,
 hardware_popc = 4,
 v9_instructions = 5,
 vis1_instructions = 6,
 vis2_instructions = 7,
 sun4v_instructions = 8,
 blk_init_instructions = 9,
 fmaf_instructions = 10,
-fmau_instructions = 11,
-vis3_instructions = 12,
-cbcond_instructions = 13,
-sparc64_family = 14,
-M_family = 15,
-T_family = 16,
-T1_model = 17,
-sparc5_instructions = 18,
-aes_instructions = 19,
-sha1_instruction = 20,
-sha256_instruction = 21,
-sha512_instruction = 22,
-crc32c_instruction = 23
+vis3_instructions = 11,
+cbcond_instructions = 12,
+sparc64_family = 13,
+M_family = 14,
+T_family = 15,
+T1_model = 16,
+sparc5_instructions = 17,
+aes_instructions = 18,
+sha1_instruction = 19,
+sha256_instruction = 20,
+sha512_instruction = 21,
+crc32c_instruction = 22
 };
 enum Feature_Flag_Set {
@@ -75,7 +74,6 @@ protected:
 sun4v_m = 1 << sun4v_instructions,
 blk_init_instructions_m = 1 << blk_init_instructions,
 fmaf_instructions_m = 1 << fmaf_instructions,
-fmau_instructions_m = 1 << fmau_instructions,
 vis3_instructions_m = 1 << vis3_instructions,
 cbcond_instructions_m = 1 << cbcond_instructions,
 sparc64_family_m = 1 << sparc64_family,
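The renumbering above shows why dropping a feature flag is not a local edit: each enumerator is a bit position and every later flag, plus every mask derived from it, shifts by one, so the Java-side mirrors of these constants (removed further down in this commit) have to change in lockstep. The pattern in miniature (illustrative C++ only; just a few of the flags are shown):

    #include <cstdint>

    enum Feature_Flag {
      vis3_instructions   = 11,   // was 12 while fmau_instructions still existed
      cbcond_instructions = 12,
      crc32c_instruction  = 22,
    };

    enum Feature_Flag_Set : uint64_t {
      vis3_instructions_m   = UINT64_C(1) << vis3_instructions,
      cbcond_instructions_m = UINT64_C(1) << cbcond_instructions,
      crc32c_instruction_m  = UINT64_C(1) << crc32c_instruction,
    };

    // Feature tests only ever see the mask, never the raw position.
    inline bool has_vis3(uint64_t features) {
      return (features & vis3_instructions_m) != 0;
    }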

View File

@@ -89,14 +89,14 @@ void CodeInstaller::pd_patch_MetaspaceConstant(int pc_offset, Handle constant, T
 if (HotSpotMetaspaceConstantImpl::compressed(constant)) {
 #ifdef _LP64
 address operand = Assembler::locate_operand(pc, Assembler::narrow_oop_operand);
-*((narrowKlass*) operand) = record_narrow_metadata_reference(constant, CHECK);
+*((narrowKlass*) operand) = record_narrow_metadata_reference(_instructions, operand, constant, CHECK);
 TRACE_jvmci_3("relocating (narrow metaspace constant) at " PTR_FORMAT "/" PTR_FORMAT, p2i(pc), p2i(operand));
 #else
 JVMCI_ERROR("compressed Klass* on 32bit");
 #endif
 } else {
 address operand = Assembler::locate_operand(pc, Assembler::imm_operand);
-*((void**) operand) = record_metadata_reference(constant, CHECK);
+*((void**) operand) = record_metadata_reference(_instructions, operand, constant, CHECK);
 TRACE_jvmci_3("relocating (metaspace constant) at " PTR_FORMAT "/" PTR_FORMAT, p2i(pc), p2i(operand));
 }
 }

View File

@@ -254,8 +254,9 @@ address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, i
 const Register thread = NOT_LP64(rcx) LP64_ONLY(r15_thread);
 NOT_LP64(__ get_thread(thread));
 #if INCLUDE_JVMCI
-// Check if we need to take lock at entry of synchronized method.
-if (UseJVMCICompiler) {
+// Check if we need to take lock at entry of synchronized method. This can
+// only occur on method entry so emit it only for vtos with step 0.
+if (UseJVMCICompiler && state == vtos && step == 0) {
 Label L;
 __ cmpb(Address(thread, JavaThread::pending_monitorenter_offset()), 0);
 __ jcc(Assembler::zero, L);
@@ -266,6 +267,16 @@ address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, i
 // Take lock.
 lock_method();
 __ bind(L);
+} else {
+#ifdef ASSERT
+if (UseJVMCICompiler) {
+Label L;
+__ cmpb(Address(r15_thread, JavaThread::pending_monitorenter_offset()), 0);
+__ jccb(Assembler::zero, L);
+__ stop("unexpected pending monitor in deopt entry");
+__ bind(L);
+}
+#endif
 }
 #endif
 // handle exceptions

View File

@@ -1452,6 +1452,15 @@ bool Matcher::narrow_klass_use_complex_address() {
 return true;
 }
+bool Matcher::const_oop_prefer_decode() {
+ShouldNotCallThis();
+return true;
+}
+bool Matcher::const_klass_prefer_decode() {
+ShouldNotCallThis();
+return true;
+}
 // Is it better to copy float constants, or load them directly from memory?
 // Intel can load a float constant from a direct address, requiring no

View File

@@ -1660,6 +1660,19 @@ bool Matcher::narrow_klass_use_complex_address() {
 return (LogKlassAlignmentInBytes <= 3);
 }
+bool Matcher::const_oop_prefer_decode() {
+// Prefer ConN+DecodeN over ConP.
+return true;
+}
+bool Matcher::const_klass_prefer_decode() {
+// TODO: Either support matching DecodeNKlass (heap-based) in operand
+// or condisider the following:
+// Prefer ConNKlass+DecodeNKlass over ConP in simple compressed klass mode.
+//return Universe::narrow_klass_base() == NULL;
+return true;
+}
 // Is it better to copy float constants, or load them directly from
 // memory? Intel can load a float constant from a direct address,
 // requiring no extra registers. Most RISCs will have to materialize

View File

@@ -96,9 +96,6 @@ public class SPARCHotSpotJVMCIBackendFactory implements HotSpotJVMCIBackendFacto
 if ((config.vmVersionFeatures & config.sparcFmafInstructions) != 0) {
 features.add(CPUFeature.FMAF);
 }
-if ((config.vmVersionFeatures & config.sparcFmauInstructions) != 0) {
-features.add(CPUFeature.FMAU);
-}
 if ((config.vmVersionFeatures & config.sparcSparc64Family) != 0) {
 features.add(CPUFeature.SPARC64_FAMILY);
 }

View File

@@ -55,7 +55,6 @@ class SPARCHotSpotVMConfig extends HotSpotVMConfigAccess {
 final int sparcSun4v = getConstant("VM_Version::sun4v_m", Integer.class);
 final int sparcBlkInitInstructions = getConstant("VM_Version::blk_init_instructions_m", Integer.class);
 final int sparcFmafInstructions = getConstant("VM_Version::fmaf_instructions_m", Integer.class);
-final int sparcFmauInstructions = getConstant("VM_Version::fmau_instructions_m", Integer.class);
 final int sparcSparc64Family = getConstant("VM_Version::sparc64_family_m", Integer.class);
 final int sparcMFamily = getConstant("VM_Version::M_family_m", Integer.class);
 final int sparcTFamily = getConstant("VM_Version::T_family_m", Integer.class);

View File

@@ -20,17 +20,13 @@
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
-package jdk.vm.ci.hotspot.services;
+package jdk.vm.ci.hotspot;
 /**
 * An empty implementation for {@link EventProvider}. This implementation is used when no logging is
 * requested.
 */
-final class EmptyEventProvider extends EventProvider {
-EmptyEventProvider() {
-super(null);
-}
+final class EmptyEventProvider implements EventProvider {
 static InternalError shouldNotReachHere() {
 throw new InternalError("should not reach here");

View File

@ -20,58 +20,36 @@
* or visit www.oracle.com if you need additional information or have any * or visit www.oracle.com if you need additional information or have any
* questions. * questions.
*/ */
package jdk.vm.ci.hotspot.services; package jdk.vm.ci.hotspot;
import jdk.vm.ci.hotspot.services.EmptyEventProvider.EmptyCompilationEvent; import jdk.vm.ci.hotspot.EmptyEventProvider.EmptyCompilationEvent;
import jdk.vm.ci.hotspot.services.EmptyEventProvider.EmptyCompilerFailureEvent; import jdk.vm.ci.hotspot.EmptyEventProvider.EmptyCompilerFailureEvent;
import jdk.vm.ci.services.JVMCIPermission; import jdk.vm.ci.services.JVMCIPermission;
/** /**
* Service-provider class for logging compiler related events. * Service-provider class for logging compiler related events.
*/ */
public abstract class EventProvider { public interface EventProvider {
private static Void checkPermission() {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new JVMCIPermission());
}
return null;
}
@SuppressWarnings("unused")
EventProvider(Void ignore) {
}
/**
* Initializes a new instance of this class.
*
* @throws SecurityException if a security manager has been installed and it denies
* {@link JVMCIPermission}
*/
protected EventProvider() {
this(checkPermission());
}
/** /**
* Creates and returns an empty implementation for {@link EventProvider}. This implementation * Creates and returns an empty implementation for {@link EventProvider}. This implementation
* can be used when no logging is requested. * can be used when no logging is requested.
*/ */
public static EventProvider createEmptyEventProvider() { static EventProvider createEmptyEventProvider() {
return new EmptyEventProvider(); return new EmptyEventProvider();
} }
/** /**
* Creates and returns an empty implementation for {@link CompilationEvent}. * Creates and returns an empty implementation for {@link CompilationEvent}.
*/ */
public static CompilationEvent createEmptyCompilationEvent() { static CompilationEvent createEmptyCompilationEvent() {
return new EmptyCompilationEvent(); return new EmptyCompilationEvent();
} }
/** /**
* Creates and returns an empty implementation for {@link CompilationEvent}. * Creates and returns an empty implementation for {@link CompilationEvent}.
*/ */
public static CompilerFailureEvent createEmptyCompilerFailureEvent() { static CompilerFailureEvent createEmptyCompilerFailureEvent() {
return new EmptyCompilerFailureEvent(); return new EmptyCompilerFailureEvent();
} }
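With EventProvider turned into an interface and EmptyEventProvider kept package-private, code outside jdk.vm.ci.hotspot reaches the no-op variant only through the static factories. A minimal sketch of that, assuming the caller can read the jdk.vm.ci.hotspot package (it is not exported by default, so an --add-exports or a locator-granted export is needed):

    import jdk.vm.ci.hotspot.EventProvider;

    // Minimal sketch: obtain the no-op provider when no logging backend is configured.
    // EmptyEventProvider itself is package-private, so the static factory is the only way in.
    public class EventProviderFallback {
        public static void main(String[] args) {
            EventProvider provider = EventProvider.createEmptyEventProvider();
            System.out.println("using " + provider.getClass().getName());
        }
    }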

View File

@ -26,8 +26,10 @@ import jdk.vm.ci.code.CompilationRequest;
import jdk.vm.ci.common.JVMCIError; import jdk.vm.ci.common.JVMCIError;
import jdk.vm.ci.hotspot.HotSpotJVMCIRuntime.Option; import jdk.vm.ci.hotspot.HotSpotJVMCIRuntime.Option;
import jdk.vm.ci.runtime.JVMCICompiler; import jdk.vm.ci.runtime.JVMCICompiler;
import jdk.vm.ci.runtime.JVMCICompilerFactory;
import jdk.vm.ci.runtime.JVMCIRuntime; import jdk.vm.ci.runtime.JVMCIRuntime;
import jdk.vm.ci.runtime.services.JVMCICompilerFactory; import jdk.vm.ci.services.JVMCIServiceLocator;
import jdk.vm.ci.services.JVMCIPermission;
import jdk.vm.ci.services.Services; import jdk.vm.ci.services.Services;
final class HotSpotJVMCICompilerConfig { final class HotSpotJVMCICompilerConfig {
@ -37,7 +39,7 @@ final class HotSpotJVMCICompilerConfig {
* to perform a compilation. This allows the reflective parts of the JVMCI API to be used * to perform a compilation. This allows the reflective parts of the JVMCI API to be used
* without requiring a compiler implementation to be available. * without requiring a compiler implementation to be available.
*/ */
private static class DummyCompilerFactory extends JVMCICompilerFactory implements JVMCICompiler { private static class DummyCompilerFactory implements JVMCICompilerFactory, JVMCICompiler {
public HotSpotCompilationRequestResult compileMethod(CompilationRequest request) { public HotSpotCompilationRequestResult compileMethod(CompilationRequest request) {
throw new JVMCIError("no JVMCI compiler selected"); throw new JVMCIError("no JVMCI compiler selected");
@ -63,15 +65,16 @@ final class HotSpotJVMCICompilerConfig {
* Gets the selected system compiler factory. * Gets the selected system compiler factory.
* *
* @return the selected system compiler factory * @return the selected system compiler factory
* @throws SecurityException if a security manager is present and it denies
* {@link JVMCIPermission} for any {@link JVMCIServiceLocator} loaded by this method
*/ */
static JVMCICompilerFactory getCompilerFactory() { static JVMCICompilerFactory getCompilerFactory() {
if (compilerFactory == null) { if (compilerFactory == null) {
JVMCICompilerFactory factory = null; JVMCICompilerFactory factory = null;
String compilerName = Option.Compiler.getString(); String compilerName = Option.Compiler.getString();
if (compilerName != null) { if (compilerName != null) {
for (JVMCICompilerFactory f : Services.load(JVMCICompilerFactory.class)) { for (JVMCICompilerFactory f : JVMCIServiceLocator.getProviders(JVMCICompilerFactory.class)) {
if (f.getCompilerName().equals(compilerName)) { if (f.getCompilerName().equals(compilerName)) {
Services.exportJVMCITo(f.getClass());
factory = f; factory = f;
} }
} }
@ -80,8 +83,9 @@ final class HotSpotJVMCICompilerConfig {
} }
} else { } else {
// Auto select a single available compiler // Auto select a single available compiler
for (JVMCICompilerFactory f : Services.load(JVMCICompilerFactory.class)) { for (JVMCICompilerFactory f : JVMCIServiceLocator.getProviders(JVMCICompilerFactory.class)) {
if (factory == null) { if (factory == null) {
Services.exportJVMCITo(f.getClass());
factory = f; factory = f;
} else { } else {
// Multiple factories seen - cancel auto selection // Multiple factories seen - cancel auto selection
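The factory lookup above now goes through JVMCIServiceLocator instead of loading JVMCICompilerFactory instances directly. A hedged sketch of the equivalent lookup as free-standing code, assuming access to the jdk.vm.ci.services and jdk.vm.ci.runtime packages; "demo" stands in for whatever name -Djvmci.Compiler selects:

    import java.util.List;

    import jdk.vm.ci.runtime.JVMCICompilerFactory;
    import jdk.vm.ci.services.JVMCIServiceLocator;

    // Sketch of the selection step: ask every registered locator for
    // JVMCICompilerFactory providers and match on the configured compiler name.
    final class CompilerFactoryLookup {
        static JVMCICompilerFactory byName(String compilerName) {
            List<JVMCICompilerFactory> factories = JVMCIServiceLocator.getProviders(JVMCICompilerFactory.class);
            for (JVMCICompilerFactory factory : factories) {
                if (factory.getCompilerName().equals(compilerName)) {
                    return factory;
                }
            }
            return null; // no factory advertises this name
        }
    }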

View File

@ -20,14 +20,14 @@
* or visit www.oracle.com if you need additional information or have any * or visit www.oracle.com if you need additional information or have any
* questions. * questions.
*/ */
package jdk.vm.ci.hotspot.services; package jdk.vm.ci.hotspot;
import jdk.vm.ci.runtime.services.JVMCICompilerFactory; import jdk.vm.ci.runtime.JVMCICompilerFactory;
/** /**
* HotSpot extensions to {@link JVMCICompilerFactory}. * HotSpot extensions to {@link JVMCICompilerFactory}.
*/ */
public abstract class HotSpotJVMCICompilerFactory extends JVMCICompilerFactory { public abstract class HotSpotJVMCICompilerFactory implements JVMCICompilerFactory {
/** /**
* Gets 0 or more prefixes identifying classes that should by compiled by C1 in simple mode * Gets 0 or more prefixes identifying classes that should by compiled by C1 in simple mode

View File

@ -27,13 +27,11 @@ import static jdk.vm.ci.common.InitTimer.timer;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.PrintStream; import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.ServiceLoader;
import java.util.TreeMap; import java.util.TreeMap;
import jdk.internal.misc.VM; import jdk.internal.misc.VM;
@ -43,16 +41,15 @@ import jdk.vm.ci.code.CompiledCode;
import jdk.vm.ci.code.InstalledCode; import jdk.vm.ci.code.InstalledCode;
import jdk.vm.ci.common.InitTimer; import jdk.vm.ci.common.InitTimer;
import jdk.vm.ci.common.JVMCIError; import jdk.vm.ci.common.JVMCIError;
import jdk.vm.ci.hotspot.services.HotSpotJVMCICompilerFactory; import jdk.vm.ci.hotspot.HotSpotJVMCICompilerFactory.CompilationLevel;
import jdk.vm.ci.hotspot.services.HotSpotJVMCICompilerFactory.CompilationLevel;
import jdk.vm.ci.hotspot.services.HotSpotVMEventListener;
import jdk.vm.ci.meta.JavaKind; import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.JavaType; import jdk.vm.ci.meta.JavaType;
import jdk.vm.ci.meta.ResolvedJavaType; import jdk.vm.ci.meta.ResolvedJavaType;
import jdk.vm.ci.runtime.JVMCI; import jdk.vm.ci.runtime.JVMCI;
import jdk.vm.ci.runtime.JVMCIBackend; import jdk.vm.ci.runtime.JVMCIBackend;
import jdk.vm.ci.runtime.JVMCICompiler; import jdk.vm.ci.runtime.JVMCICompiler;
import jdk.vm.ci.runtime.services.JVMCICompilerFactory; import jdk.vm.ci.runtime.JVMCICompilerFactory;
import jdk.vm.ci.services.JVMCIServiceLocator;
import jdk.vm.ci.services.Services; import jdk.vm.ci.services.Services;
/** /**
@ -90,14 +87,17 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
* A list of all supported JVMCI options. * A list of all supported JVMCI options.
*/ */
public enum Option { public enum Option {
// @formatter:off
Compiler(String.class, null, "Selects the system compiler."), Compiler(String.class, null, "Selects the system compiler."),
// Note: The following one is not used (see InitTimer.ENABLED). It is added here // Note: The following one is not used (see InitTimer.ENABLED). It is added here
// so that -Djvmci.PrintFlags=true shows the option. // so that -XX:+JVMCIPrintProperties shows the option.
InitTimer(boolean.class, false, "Specifies if initialization timing is enabled."), InitTimer(Boolean.class, false, "Specifies if initialization timing is enabled."),
PrintConfig(boolean.class, false, "Prints VM configuration available via JVMCI and exits."), PrintConfig(Boolean.class, false, "Prints VM configuration available via JVMCI."),
PrintFlags(boolean.class, false, "Prints all JVMCI flags and exits."), TraceMethodDataFilter(String.class, null,
ShowFlags(boolean.class, false, "Prints all JVMCI flags and continues."), "Enables tracing of profiling info when read by JVMCI.",
TraceMethodDataFilter(String.class, null, ""); "Empty value: trace all methods",
"Non-empty value: trace methods whose fully qualified name contains the value.");
// @formatter:on
/** /**
* The prefix for system properties that are JVMCI options. * The prefix for system properties that are JVMCI options.
@ -113,25 +113,25 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
private Object value; private Object value;
private final Object defaultValue; private final Object defaultValue;
private boolean isDefault; private boolean isDefault;
private final String help; private final String[] helpLines;
Option(Class<?> type, Object defaultValue, String help) { Option(Class<?> type, Object defaultValue, String... helpLines) {
assert Character.isUpperCase(name().charAt(0)) : "Option name must start with upper-case letter: " + name(); assert Character.isUpperCase(name().charAt(0)) : "Option name must start with upper-case letter: " + name();
this.type = type; this.type = type;
this.value = UNINITIALIZED; this.value = UNINITIALIZED;
this.defaultValue = defaultValue; this.defaultValue = defaultValue;
this.help = help; this.helpLines = helpLines;
} }
@SuppressFBWarnings(value = "ES_COMPARING_STRINGS_WITH_EQ", justification = "sentinel must be String since it's a static final in an enum") @SuppressFBWarnings(value = "ES_COMPARING_STRINGS_WITH_EQ", justification = "sentinel must be String since it's a static final in an enum")
private Object getValue() { private Object getValue() {
if (value == UNINITIALIZED) { if (value == UNINITIALIZED) {
String propertyValue = VM.getSavedProperty(JVMCI_OPTION_PROPERTY_PREFIX + name()); String propertyValue = VM.getSavedProperty(getPropertyName());
if (propertyValue == null) { if (propertyValue == null) {
this.value = defaultValue; this.value = defaultValue;
this.isDefault = true; this.isDefault = true;
} else { } else {
if (type == boolean.class) { if (type == Boolean.class) {
this.value = Boolean.parseBoolean(propertyValue); this.value = Boolean.parseBoolean(propertyValue);
} else if (type == String.class) { } else if (type == String.class) {
this.value = propertyValue; this.value = propertyValue;
@ -146,6 +146,13 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
return value; return value;
} }
/**
* Gets the name of system property from which this option gets its value.
*/
public String getPropertyName() {
return JVMCI_OPTION_PROPERTY_PREFIX + name();
}
/** /**
* Returns the option's value as boolean. * Returns the option's value as boolean.
* *
@ -165,16 +172,31 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
} }
/** /**
* Prints all option flags to {@code out}. * Prints a description of the properties used to configure shared JVMCI code.
* *
* @param out stream to print to * @param out stream to print to
*/ */
public static void printFlags(PrintStream out) { public static void printProperties(PrintStream out) {
out.println("[List of JVMCI options]"); out.println("[JVMCI properties]");
for (Option option : values()) { int typeWidth = 0;
int nameWidth = 0;
Option[] values = values();
for (Option option : values) {
typeWidth = Math.max(typeWidth, option.type.getSimpleName().length());
nameWidth = Math.max(nameWidth, option.getPropertyName().length());
}
for (Option option : values) {
Object value = option.getValue(); Object value = option.getValue();
String assign = option.isDefault ? ":=" : " ="; if (value instanceof String) {
out.printf("%9s %-40s %s %-14s %s%n", option.type.getSimpleName(), option, assign, value, option.help); value = '"' + String.valueOf(value) + '"';
}
String assign = option.isDefault ? " =" : ":=";
String format = "%" + (typeWidth + 1) + "s %-" + (nameWidth + 1) + "s %s %s%n";
out.printf(format, option.type.getSimpleName(), option.getPropertyName(), assign, value);
String helpFormat = "%" + (typeWidth + 1) + "s %s%n";
for (String line : option.helpLines) {
out.printf(helpFormat, "", line);
}
} }
} }
} }
@ -221,11 +243,7 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
if (vmEventListeners == null) { if (vmEventListeners == null) {
synchronized (this) { synchronized (this) {
if (vmEventListeners == null) { if (vmEventListeners == null) {
List<HotSpotVMEventListener> listeners = new ArrayList<>(); vmEventListeners = JVMCIServiceLocator.getProviders(HotSpotVMEventListener.class);
for (HotSpotVMEventListener vmEventListener : ServiceLoader.load(HotSpotVMEventListener.class)) {
listeners.add(vmEventListener);
}
vmEventListeners = listeners;
} }
} }
} }
@ -239,7 +257,6 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
@SuppressWarnings("unused") private final String[] trivialPrefixes; @SuppressWarnings("unused") private final String[] trivialPrefixes;
@SuppressWarnings("try") @SuppressWarnings("try")
@SuppressFBWarnings(value = "DM_EXIT", justification = "PrintFlags is meant to exit the VM")
private HotSpotJVMCIRuntime() { private HotSpotJVMCIRuntime() {
compilerToVm = new CompilerToVM(); compilerToVm = new CompilerToVM();
@ -261,20 +278,6 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
metaAccessContext = new HotSpotJVMCIMetaAccessContext(); metaAccessContext = new HotSpotJVMCIMetaAccessContext();
boolean printFlags = Option.PrintFlags.getBoolean();
boolean showFlags = Option.ShowFlags.getBoolean();
if (printFlags || showFlags) {
Option.printFlags(System.out);
if (printFlags) {
System.exit(0);
}
}
if (Option.PrintConfig.getBoolean()) {
printConfig(configStore, compilerToVm);
System.exit(0);
}
compilerFactory = HotSpotJVMCICompilerConfig.getCompilerFactory(); compilerFactory = HotSpotJVMCICompilerConfig.getCompilerFactory();
if (compilerFactory instanceof HotSpotJVMCICompilerFactory) { if (compilerFactory instanceof HotSpotJVMCICompilerFactory) {
hsCompilerFactory = (HotSpotJVMCICompilerFactory) compilerFactory; hsCompilerFactory = (HotSpotJVMCICompilerFactory) compilerFactory;
@ -298,6 +301,16 @@ public final class HotSpotJVMCIRuntime implements HotSpotJVMCIRuntimeProvider {
trivialPrefixes = null; trivialPrefixes = null;
compilationLevelAdjustment = config.compLevelAdjustmentNone; compilationLevelAdjustment = config.compLevelAdjustmentNone;
} }
if (config.getFlag("JVMCIPrintProperties", Boolean.class)) {
PrintStream out = new PrintStream(getLogStream());
Option.printProperties(out);
compilerFactory.printProperties(out);
}
if (Option.PrintConfig.getBoolean()) {
printConfig(configStore, compilerToVm);
}
} }
private JVMCIBackend registerBackend(JVMCIBackend backend) { private JVMCIBackend registerBackend(JVMCIBackend backend) {
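Each Option above is backed by a plain system property, prefixed with "jvmci." as in the old -Djvmci.PrintFlags example, and the old PrintFlags/ShowFlags pair is replaced by the VM flag -XX:+JVMCIPrintProperties. A small probe, a sketch that assumes the jdk.vm.ci.hotspot package is accessible to the caller:

    import jdk.vm.ci.hotspot.HotSpotJVMCIRuntime.Option;

    // Sketch: every Option reads its value from a system property named "jvmci." + option name,
    // e.g. -Djvmci.TraceMethodDataFilter=java.lang.String sets TraceMethodDataFilter.
    public class JvmciOptionProbe {
        public static void main(String[] args) {
            for (Option option : Option.values()) {
                System.out.println(option.getPropertyName());
            }
            // Same table that -XX:+JVMCIPrintProperties makes the VM print during startup.
            Option.printProperties(System.out);
        }
    }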

View File

@ -53,9 +53,9 @@ final class HotSpotMethodData {
* Reference to the C++ MethodData object. * Reference to the C++ MethodData object.
*/ */
final long metaspaceMethodData; final long metaspaceMethodData;
@SuppressWarnings("unused") private final HotSpotResolvedJavaMethodImpl method; private final HotSpotResolvedJavaMethodImpl method;
public HotSpotMethodData(long metaspaceMethodData, HotSpotResolvedJavaMethodImpl method) { HotSpotMethodData(long metaspaceMethodData, HotSpotResolvedJavaMethodImpl method) {
this.metaspaceMethodData = metaspaceMethodData; this.metaspaceMethodData = metaspaceMethodData;
this.method = method; this.method = method;
} }
@ -107,6 +107,18 @@ final class HotSpotMethodData {
return UNSAFE.getByte(metaspaceMethodData + config.methodDataOopTrapHistoryOffset + config.deoptReasonOSROffset + reasonIndex) & 0xFF; return UNSAFE.getByte(metaspaceMethodData + config.methodDataOopTrapHistoryOffset + config.deoptReasonOSROffset + reasonIndex) & 0xFF;
} }
public int getDecompileCount() {
return UNSAFE.getInt(metaspaceMethodData + config.methodDataDecompiles);
}
public int getOverflowRecompileCount() {
return UNSAFE.getInt(metaspaceMethodData + config.methodDataOverflowRecompiles);
}
public int getOverflowTrapCount() {
return UNSAFE.getInt(metaspaceMethodData + config.methodDataOverflowTraps);
}
public HotSpotMethodDataAccessor getNormalData(int position) { public HotSpotMethodDataAccessor getNormalData(int position) {
if (position >= normalDataSize()) { if (position >= normalDataSize()) {
return null; return null;
@ -214,6 +226,12 @@ final class HotSpotMethodData {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
String nl = String.format("%n"); String nl = String.format("%n");
String nlIndent = String.format("%n%38s", ""); String nlIndent = String.format("%n%38s", "");
sb.append("Raw method data for ");
sb.append(method.format("%H.%n(%p)"));
sb.append(":");
sb.append(nl);
sb.append(String.format("nof_decompiles(%d) nof_overflow_recompiles(%d) nof_overflow_traps(%d)%n",
getDecompileCount(), getOverflowRecompileCount(), getOverflowTrapCount()));
if (hasNormalData()) { if (hasNormalData()) {
int pos = 0; int pos = 0;
HotSpotMethodDataAccessor data; HotSpotMethodDataAccessor data;
@ -427,6 +445,10 @@ final class HotSpotMethodData {
protected abstract long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position); protected abstract long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position);
public int getNonprofiledCount(HotSpotMethodData data, int position) {
return data.readUnsignedIntAsSignedInt(position, NONPROFILED_COUNT_OFFSET);
}
private JavaTypeProfile createTypeProfile(TriState nullSeen, RawItemProfile<ResolvedJavaType> profile) { private JavaTypeProfile createTypeProfile(TriState nullSeen, RawItemProfile<ResolvedJavaType> profile) {
if (profile.entries <= 0 || profile.totalCount <= 0) { if (profile.entries <= 0 || profile.totalCount <= 0) {
return null; return null;
@ -462,7 +484,7 @@ final class HotSpotMethodData {
TriState nullSeen = getNullSeen(data, pos); TriState nullSeen = getNullSeen(data, pos);
TriState exceptionSeen = getExceptionSeen(data, pos); TriState exceptionSeen = getExceptionSeen(data, pos);
sb.append(format("count(%d) null_seen(%s) exception_seen(%s) nonprofiled_count(%d) entries(%d)", getCounterValue(data, pos), nullSeen, exceptionSeen, sb.append(format("count(%d) null_seen(%s) exception_seen(%s) nonprofiled_count(%d) entries(%d)", getCounterValue(data, pos), nullSeen, exceptionSeen,
getTypesNotRecordedExecutionCount(data, pos), profile.entries)); getNonprofiledCount(data, pos), profile.entries));
for (int i = 0; i < profile.entries; i++) { for (int i = 0; i < profile.entries; i++) {
long count = profile.counts[i]; long count = profile.counts[i];
sb.append(format("%n %s (%d, %4.2f)", profile.items[i].toJavaName(), count, (double) count / profile.totalCount)); sb.append(format("%n %s (%d, %4.2f)", profile.items[i].toJavaName(), count, (double) count / profile.totalCount));
@ -490,7 +512,7 @@ final class HotSpotMethodData {
@Override @Override
protected long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position) { protected long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position) {
return data.readUnsignedIntAsSignedInt(position, NONPROFILED_COUNT_OFFSET); return getNonprofiledCount(data, position);
} }
} }
@ -788,7 +810,8 @@ final class HotSpotMethodData {
@Override @Override
public StringBuilder appendTo(StringBuilder sb, HotSpotMethodData data, int pos) { public StringBuilder appendTo(StringBuilder sb, HotSpotMethodData data, int pos) {
return null; sb.append("unknown profile data with tag: " + tag);
return sb;
} }
} }
@ -822,10 +845,10 @@ final class HotSpotMethodData {
private static boolean checkAccessorTags() { private static boolean checkAccessorTags() {
int expectedTag = 0; int expectedTag = 0;
for (HotSpotMethodDataAccessor accessor : PROFILE_DATA_ACCESSORS) { for (HotSpotMethodDataAccessor accessor : PROFILE_DATA_ACCESSORS) {
if (expectedTag ==0 ) { if (expectedTag == 0) {
assert accessor == null; assert accessor == null;
} else { } else {
assert accessor.tag == expectedTag: expectedTag + " != " + accessor.tag + " " + accessor; assert accessor.tag == expectedTag : expectedTag + " != " + accessor.tag + " " + accessor;
} }
expectedTag++; expectedTag++;
} }

View File

@ -57,6 +57,18 @@ public final class HotSpotProfilingInfo implements ProfilingInfo {
return method.getCodeSize(); return method.getCodeSize();
} }
public int getDecompileCount() {
return methodData.getDecompileCount();
}
public int getOverflowRecompileCount() {
return methodData.getOverflowRecompileCount();
}
public int getOverflowTrapCount() {
return methodData.getOverflowTrapCount();
}
@Override @Override
public JavaTypeProfile getTypeProfile(int bci) { public JavaTypeProfile getTypeProfile(int bci) {
if (!isMature) { if (!isMature) {
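Since HotSpotMethodData is package-private, the new counters are meant to be read through HotSpotProfilingInfo. A sketch under the assumption that `info` is the profiling info of a resolved method obtained inside a JVMCI compiler:

    import jdk.vm.ci.hotspot.HotSpotProfilingInfo;

    // Sketch: the three new accessors mirror MethodData::_nof_decompiles,
    // _nof_overflow_recompiles and _nof_overflow_traps in the VM.
    final class DeoptPressure {
        static String summarize(HotSpotProfilingInfo info) {
            return String.format("decompiles=%d overflow_recompiles=%d overflow_traps=%d",
                    info.getDecompileCount(),
                    info.getOverflowRecompileCount(),
                    info.getOverflowTrapCount());
        }
    }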

View File

@ -434,7 +434,6 @@ final class HotSpotResolvedJavaMethodImpl extends HotSpotMethod implements HotSp
methodData = new HotSpotMethodData(metaspaceMethodData, this); methodData = new HotSpotMethodData(metaspaceMethodData, this);
String methodDataFilter = Option.TraceMethodDataFilter.getString(); String methodDataFilter = Option.TraceMethodDataFilter.getString();
if (methodDataFilter != null && this.format("%H.%n").contains(methodDataFilter)) { if (methodDataFilter != null && this.format("%H.%n").contains(methodDataFilter)) {
System.out.println("Raw method data for " + this.format("%H.%n(%p)") + ":");
System.out.println(methodData.toString()); System.out.println(methodData.toString());
} }
} }

View File

@ -160,6 +160,10 @@ class HotSpotVMConfig extends HotSpotVMConfigAccess {
final int methodDataOopTrapHistoryOffset = getFieldOffset("MethodData::_trap_hist._array[0]", Integer.class, "u1"); final int methodDataOopTrapHistoryOffset = getFieldOffset("MethodData::_trap_hist._array[0]", Integer.class, "u1");
final int methodDataIRSizeOffset = getFieldOffset("MethodData::_jvmci_ir_size", Integer.class, "int"); final int methodDataIRSizeOffset = getFieldOffset("MethodData::_jvmci_ir_size", Integer.class, "int");
final int methodDataDecompiles = getFieldOffset("MethodData::_nof_decompiles", Integer.class, "uint");
final int methodDataOverflowRecompiles = getFieldOffset("MethodData::_nof_overflow_recompiles", Integer.class, "uint");
final int methodDataOverflowTraps = getFieldOffset("MethodData::_nof_overflow_traps", Integer.class, "uint");
final int nmethodCompLevelOffset = getFieldOffset("nmethod::_comp_level", Integer.class, "int"); final int nmethodCompLevelOffset = getFieldOffset("nmethod::_comp_level", Integer.class, "int");
final int compilationLevelNone = getConstant("CompLevel_none", Integer.class); final int compilationLevelNone = getConstant("CompLevel_none", Integer.class);

View File

@ -20,59 +20,35 @@
* or visit www.oracle.com if you need additional information or have any * or visit www.oracle.com if you need additional information or have any
* questions. * questions.
*/ */
package jdk.vm.ci.hotspot.services; package jdk.vm.ci.hotspot;
import jdk.vm.ci.code.CompiledCode; import jdk.vm.ci.code.CompiledCode;
import jdk.vm.ci.code.InstalledCode; import jdk.vm.ci.code.InstalledCode;
import jdk.vm.ci.hotspot.HotSpotCodeCacheProvider;
import jdk.vm.ci.services.JVMCIPermission;
/** /**
* Service-provider class for responding to VM events. * Listener for responding to VM events.
*/ */
public abstract class HotSpotVMEventListener { public interface HotSpotVMEventListener {
private static Void checkPermission() {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new JVMCIPermission());
}
return null;
}
@SuppressWarnings("unused")
HotSpotVMEventListener(Void ignore) {
}
/**
* Initializes a new instance of this class.
*
* @throws SecurityException if a security manager has been installed and it denies
* {@link JVMCIPermission}
*/
protected HotSpotVMEventListener() {
this(checkPermission());
}
/** /**
* Notifies this client that the VM is shutting down. * Notifies this client that the VM is shutting down.
*/ */
public void notifyShutdown() { default void notifyShutdown() {
} }
/** /**
* Notify on successful install into the code cache. * Notify on successful install into the code cache.
* *
* @param hotSpotCodeCacheProvider * @param hotSpotCodeCacheProvider the code cache into which the code was installed
* @param installedCode * @param installedCode the code that was installed
* @param compiledCode * @param compiledCode the compiled code from which {@code installedCode} was produced
*/ */
public void notifyInstall(HotSpotCodeCacheProvider hotSpotCodeCacheProvider, InstalledCode installedCode, CompiledCode compiledCode) { default void notifyInstall(HotSpotCodeCacheProvider hotSpotCodeCacheProvider, InstalledCode installedCode, CompiledCode compiledCode) {
} }
/** /**
* Notify on completion of a bootstrap. * Notify on completion of a bootstrap.
*/ */
public void notifyBootstrapFinished() { default void notifyBootstrapFinished() {
} }
} }
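Because the listener is now an interface whose methods all have default no-op bodies, an implementation only overrides the notifications it cares about. A hedged sketch of such a listener; the class name is hypothetical:

    import jdk.vm.ci.code.CompiledCode;
    import jdk.vm.ci.code.InstalledCode;
    import jdk.vm.ci.hotspot.HotSpotCodeCacheProvider;
    import jdk.vm.ci.hotspot.HotSpotVMEventListener;

    // Sketch: only code-install events are of interest here; shutdown and bootstrap
    // notifications fall back to the default no-ops.
    public class InstallLoggingListener implements HotSpotVMEventListener {
        @Override
        public void notifyInstall(HotSpotCodeCacheProvider codeCache, InstalledCode installedCode, CompiledCode compiledCode) {
            System.out.println("code installed: " + installedCode);
        }
    }

As the HotSpotJVMCIRuntime change above shows, the runtime now collects listeners via JVMCIServiceLocator.getProviders(HotSpotVMEventListener.class), so a class like this must be handed out by a locator (see the JVMCIServiceLocator sketch further down) rather than registered through ServiceLoader directly.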

View File

@ -20,54 +20,38 @@
* or visit www.oracle.com if you need additional information or have any * or visit www.oracle.com if you need additional information or have any
* questions. * questions.
*/ */
package jdk.vm.ci.runtime.services; package jdk.vm.ci.runtime;
import jdk.vm.ci.runtime.JVMCICompiler; import java.io.PrintStream;
import jdk.vm.ci.runtime.JVMCIRuntime;
import jdk.vm.ci.services.JVMCIPermission;
/** /**
* Service-provider class for creating JVMCI compilers. * Factory for creating JVMCI compilers.
*/ */
public abstract class JVMCICompilerFactory { public interface JVMCICompilerFactory {
private static Void checkPermission() {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new JVMCIPermission());
}
return null;
}
@SuppressWarnings("unused")
private JVMCICompilerFactory(Void ignore) {
}
/**
* Initializes a new instance of this class.
*
* @throws SecurityException if a security manager has been installed and it denies
* {@link JVMCIPermission}
*/
protected JVMCICompilerFactory() {
this(checkPermission());
}
/** /**
* Get the name of this compiler. The name is used by JVMCI to determine which factory to use. * Get the name of this compiler. The name is used by JVMCI to determine which factory to use.
*/ */
public abstract String getCompilerName(); String getCompilerName();
/** /**
* Notifies this object that it has been selected to {@linkplain #createCompiler(JVMCIRuntime) * Notifies this object that it has been selected to {@linkplain #createCompiler(JVMCIRuntime)
* create} a compiler and it should now perform any heavy weight initialization that it deferred * create} a compiler and it should now perform any heavy weight initialization that it deferred
* during construction. * during construction.
*/ */
public void onSelection() { default void onSelection() {
} }
/** /**
* Create a new instance of a {@link JVMCICompiler}. * Create a new instance of a {@link JVMCICompiler}.
*/ */
public abstract JVMCICompiler createCompiler(JVMCIRuntime runtime); JVMCICompiler createCompiler(JVMCIRuntime runtime);
/**
* Prints a description of the properties used to configure this compiler.
*
* @param out where to print the message
*/
default void printProperties(PrintStream out) {
}
} }
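A sketch of a factory for a hypothetical compiler named "demo", which would be selectable with -Djvmci.Compiler=demo once exposed through a locator; the compiler itself is deliberately left unimplemented here:

    import java.io.PrintStream;

    import jdk.vm.ci.runtime.JVMCICompiler;
    import jdk.vm.ci.runtime.JVMCICompilerFactory;
    import jdk.vm.ci.runtime.JVMCIRuntime;

    // Sketch: getCompilerName() and createCompiler() are the two required members;
    // onSelection() and printProperties() keep their default bodies unless overridden.
    public class DemoCompilerFactory implements JVMCICompilerFactory {
        @Override
        public String getCompilerName() {
            return "demo";
        }

        @Override
        public JVMCICompiler createCompiler(JVMCIRuntime runtime) {
            // A real factory returns its compiler here; this sketch stops short on purpose.
            throw new UnsupportedOperationException("demo compiler not implemented");
        }

        @Override
        public void printProperties(PrintStream out) {
            // Printed after the shared JVMCI properties when -XX:+JVMCIPrintProperties is set.
            out.println("[demo compiler has no properties]");
        }
    }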

View File

@ -0,0 +1,83 @@
/*
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.vm.ci.services;
import java.util.ArrayList;
import java.util.List;
/**
* Service-provider class for the runtime to locate providers of JVMCI services where the latter are
* not in packages exported by the JVMCI module. As part of instantiating
* {@link JVMCIServiceLocator}, all JVMCI packages will be {@linkplain Services#exportJVMCITo(Class)
* exported} to the module defining the class of the instantiated object.
*
* While the {@link #getProvider(Class)} method can be used directly, it's usually easier to use
* {@link #getProviders(Class)}.
*/
public abstract class JVMCIServiceLocator {
private static Void checkPermission() {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new JVMCIPermission());
}
return null;
}
@SuppressWarnings("unused")
private JVMCIServiceLocator(Void ignore) {
}
/**
* Creates a capability for accessing JVMCI. Once successfully instantiated, JVMCI exports all
* its packages to the module defining the type of this object.
*
* @throws SecurityException if a security manager has been installed and it denies
* {@link JVMCIPermission}
*/
protected JVMCIServiceLocator() {
this(checkPermission());
Services.exportJVMCITo(getClass());
}
/**
* Gets the provider of the service defined by {@code service} or {@code null} if this object
* does not have a provider for {@code service}.
*/
public abstract <S> S getProvider(Class<S> service);
/**
* Gets the providers of the service defined by {@code service} by querying the
* {@link JVMCIServiceLocator} providers obtained by {@link Services#load(Class)}.
*/
public static <S> List<S> getProviders(Class<S> service) {
List<S> providers = new ArrayList<>();
for (JVMCIServiceLocator access : Services.load(JVMCIServiceLocator.class)) {
S provider = access.getProvider(service);
if (provider != null) {
providers.add(provider);
}
}
return providers;
}
}
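A single locator can hand out several JVMCI services; the runtime finds the locator itself through Services.load and then asks it for providers via getProvider. A hedged sketch, reusing the hypothetical InstallLoggingListener and DemoCompilerFactory classes sketched earlier:

    import jdk.vm.ci.hotspot.HotSpotVMEventListener;
    import jdk.vm.ci.runtime.JVMCICompilerFactory;
    import jdk.vm.ci.services.JVMCIServiceLocator;

    // Sketch: answer provider requests for the two services this module contributes,
    // and return null for everything else.
    public final class DemoServiceLocator extends JVMCIServiceLocator {
        @Override
        public <S> S getProvider(Class<S> service) {
            if (service == HotSpotVMEventListener.class) {
                return service.cast(new InstallLoggingListener());
            }
            if (service == JVMCICompilerFactory.class) {
                return service.cast(new DemoCompilerFactory());
            }
            return null;
        }
    }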

View File

@ -349,7 +349,6 @@ public class SPARC extends Architecture {
SUN4V, SUN4V,
BLK_INIT_INSTRUCTIONS, BLK_INIT_INSTRUCTIONS,
FMAF, FMAF,
FMAU,
SPARC64_FAMILY, SPARC64_FAMILY,
M_FAMILY, M_FAMILY,
T_FAMILY, T_FAMILY,

View File

@ -25,12 +25,9 @@
module jdk.vm.ci { module jdk.vm.ci {
exports jdk.vm.ci.services; exports jdk.vm.ci.services;
exports jdk.vm.ci.runtime.services;
exports jdk.vm.ci.hotspot.services;
uses jdk.vm.ci.hotspot.services.HotSpotVMEventListener; uses jdk.vm.ci.services.JVMCIServiceLocator;
uses jdk.vm.ci.hotspot.HotSpotJVMCIBackendFactory; uses jdk.vm.ci.hotspot.HotSpotJVMCIBackendFactory;
uses jdk.vm.ci.runtime.services.JVMCICompilerFactory;
provides jdk.vm.ci.hotspot.HotSpotJVMCIBackendFactory with provides jdk.vm.ci.hotspot.HotSpotJVMCIBackendFactory with
jdk.vm.ci.hotspot.aarch64.AArch64HotSpotJVMCIBackendFactory; jdk.vm.ci.hotspot.aarch64.AArch64HotSpotJVMCIBackendFactory;
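Since the module now declares `uses jdk.vm.ci.services.JVMCIServiceLocator`, a third-party compiler module contributes its services with a matching `provides` clause. A hypothetical module descriptor for the locator sketched above; note that compiling against the non-exported JVMCI packages still needs --add-exports, while at runtime the exports are granted when the locator is instantiated, as described in the JVMCIServiceLocator javadoc:

    // Hypothetical descriptor for a module contributing a JVMCI compiler and VM event listener.
    module com.example.demo.compiler {
        requires jdk.vm.ci;

        provides jdk.vm.ci.services.JVMCIServiceLocator
            with com.example.demo.compiler.DemoServiceLocator;
    }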

View File

@ -2563,7 +2563,7 @@ bool os::get_page_info(char *start, page_info* info) {
uint64_t outdata[2]; uint64_t outdata[2];
uint_t validity = 0; uint_t validity = 0;
if (os::Solaris::meminfo(&addr, 1, info_types, 2, outdata, &validity) < 0) { if (meminfo(&addr, 1, info_types, 2, outdata, &validity) < 0) {
return false; return false;
} }
@ -2601,7 +2601,7 @@ char *os::scan_pages(char *start, char* end, page_info* page_expected,
addrs_count++; addrs_count++;
} }
if (os::Solaris::meminfo(addrs, addrs_count, info_types, types, outdata, validity) < 0) { if (meminfo(addrs, addrs_count, info_types, types, outdata, validity) < 0) {
return NULL; return NULL;
} }
@ -4160,9 +4160,6 @@ void os::Solaris::install_signal_handlers() {
void report_error(const char* file_name, int line_no, const char* title, void report_error(const char* file_name, int line_no, const char* title,
const char* format, ...); const char* format, ...);
// (Static) wrapper for getisax(2) call.
os::Solaris::getisax_func_t os::Solaris::_getisax = 0;
// (Static) wrappers for the liblgrp API // (Static) wrappers for the liblgrp API
os::Solaris::lgrp_home_func_t os::Solaris::_lgrp_home; os::Solaris::lgrp_home_func_t os::Solaris::_lgrp_home;
os::Solaris::lgrp_init_func_t os::Solaris::_lgrp_init; os::Solaris::lgrp_init_func_t os::Solaris::_lgrp_init;
@ -4174,9 +4171,6 @@ os::Solaris::lgrp_nlgrps_func_t os::Solaris::_lgrp_nlgrps;
os::Solaris::lgrp_cookie_stale_func_t os::Solaris::_lgrp_cookie_stale; os::Solaris::lgrp_cookie_stale_func_t os::Solaris::_lgrp_cookie_stale;
os::Solaris::lgrp_cookie_t os::Solaris::_lgrp_cookie = 0; os::Solaris::lgrp_cookie_t os::Solaris::_lgrp_cookie = 0;
// (Static) wrapper for meminfo() call.
os::Solaris::meminfo_func_t os::Solaris::_meminfo = 0;
static address resolve_symbol_lazy(const char* name) { static address resolve_symbol_lazy(const char* name) {
address addr = (address) dlsym(RTLD_DEFAULT, name); address addr = (address) dlsym(RTLD_DEFAULT, name);
if (addr == NULL) { if (addr == NULL) {
@ -4300,27 +4294,6 @@ bool os::Solaris::liblgrp_init() {
return false; return false;
} }
void os::Solaris::misc_sym_init() {
address func;
// getisax
func = resolve_symbol_lazy("getisax");
if (func != NULL) {
os::Solaris::_getisax = CAST_TO_FN_PTR(getisax_func_t, func);
}
// meminfo
func = resolve_symbol_lazy("meminfo");
if (func != NULL) {
os::Solaris::set_meminfo(CAST_TO_FN_PTR(meminfo_func_t, func));
}
}
uint_t os::Solaris::getisax(uint32_t* array, uint_t n) {
assert(_getisax != NULL, "_getisax not set");
return _getisax(array, n);
}
// int pset_getloadavg(psetid_t pset, double loadavg[], int nelem); // int pset_getloadavg(psetid_t pset, double loadavg[], int nelem);
typedef long (*pset_getloadavg_type)(psetid_t pset, double loadavg[], int nelem); typedef long (*pset_getloadavg_type)(psetid_t pset, double loadavg[], int nelem);
static pset_getloadavg_type pset_getloadavg_ptr = NULL; static pset_getloadavg_type pset_getloadavg_ptr = NULL;
@ -4351,10 +4324,6 @@ void os::init(void) {
Solaris::initialize_system_info(); Solaris::initialize_system_info();
// Initialize misc. symbols as soon as possible, so we can use them
// if we need them.
Solaris::misc_sym_init();
int fd = ::open("/dev/zero", O_RDWR); int fd = ::open("/dev/zero", O_RDWR);
if (fd < 0) { if (fd < 0) {
fatal("os::init: cannot open /dev/zero (%s)", os::strerror(errno)); fatal("os::init: cannot open /dev/zero (%s)", os::strerror(errno));

View File

@ -73,8 +73,6 @@ class Solaris {
LGRP_VIEW_OS // what's available to operating system LGRP_VIEW_OS // what's available to operating system
} lgrp_view_t; } lgrp_view_t;
typedef uint_t (*getisax_func_t)(uint32_t* array, uint_t n);
typedef lgrp_id_t (*lgrp_home_func_t)(idtype_t idtype, id_t id); typedef lgrp_id_t (*lgrp_home_func_t)(idtype_t idtype, id_t id);
typedef lgrp_cookie_t (*lgrp_init_func_t)(lgrp_view_t view); typedef lgrp_cookie_t (*lgrp_init_func_t)(lgrp_view_t view);
typedef int (*lgrp_fini_func_t)(lgrp_cookie_t cookie); typedef int (*lgrp_fini_func_t)(lgrp_cookie_t cookie);
@ -86,11 +84,6 @@ class Solaris {
lgrp_rsrc_t type); lgrp_rsrc_t type);
typedef int (*lgrp_nlgrps_func_t)(lgrp_cookie_t cookie); typedef int (*lgrp_nlgrps_func_t)(lgrp_cookie_t cookie);
typedef int (*lgrp_cookie_stale_func_t)(lgrp_cookie_t cookie); typedef int (*lgrp_cookie_stale_func_t)(lgrp_cookie_t cookie);
typedef int (*meminfo_func_t)(const uint64_t inaddr[], int addr_count,
const uint_t info_req[], int info_count,
uint64_t outdata[], uint_t validity[]);
static getisax_func_t _getisax;
static lgrp_home_func_t _lgrp_home; static lgrp_home_func_t _lgrp_home;
static lgrp_init_func_t _lgrp_init; static lgrp_init_func_t _lgrp_init;
@ -102,8 +95,6 @@ class Solaris {
static lgrp_cookie_stale_func_t _lgrp_cookie_stale; static lgrp_cookie_stale_func_t _lgrp_cookie_stale;
static lgrp_cookie_t _lgrp_cookie; static lgrp_cookie_t _lgrp_cookie;
static meminfo_func_t _meminfo;
// Large Page Support // Large Page Support
static bool is_valid_page_size(size_t bytes); static bool is_valid_page_size(size_t bytes);
static size_t page_size_for_alignment(size_t alignment); static size_t page_size_for_alignment(size_t alignment);
@ -191,8 +182,6 @@ class Solaris {
static void libthread_init(); static void libthread_init();
static void synchronization_init(); static void synchronization_init();
static bool liblgrp_init(); static bool liblgrp_init();
// Load miscellaneous symbols.
static void misc_sym_init();
// This boolean allows users to forward their own non-matching signals // This boolean allows users to forward their own non-matching signals
// to JVM_handle_solaris_signal, harmlessly. // to JVM_handle_solaris_signal, harmlessly.
static bool signal_handlers_are_installed; static bool signal_handlers_are_installed;
@ -272,17 +261,6 @@ class Solaris {
} }
static lgrp_cookie_t lgrp_cookie() { return _lgrp_cookie; } static lgrp_cookie_t lgrp_cookie() { return _lgrp_cookie; }
static bool supports_getisax() { return _getisax != NULL; }
static uint_t getisax(uint32_t* array, uint_t n);
static void set_meminfo(meminfo_func_t func) { _meminfo = func; }
static int meminfo (const uint64_t inaddr[], int addr_count,
const uint_t info_req[], int info_count,
uint64_t outdata[], uint_t validity[]) {
return _meminfo != NULL ? _meminfo(inaddr, addr_count, info_req, info_count,
outdata, validity) : -1;
}
static sigset_t* unblocked_signals(); static sigset_t* unblocked_signals();
static sigset_t* vm_signals(); static sigset_t* vm_signals();
static sigset_t* allowdebug_blocked_signals(); static sigset_t* allowdebug_blocked_signals();

View File

@ -784,7 +784,7 @@ void os::set_native_thread_name(const char *name) {
__try { __try {
RaiseException (MS_VC_EXCEPTION, 0, sizeof(info)/sizeof(DWORD), (const ULONG_PTR*)&info ); RaiseException (MS_VC_EXCEPTION, 0, sizeof(info)/sizeof(DWORD), (const ULONG_PTR*)&info );
} __except(EXCEPTION_CONTINUE_EXECUTION) {} } __except(EXCEPTION_EXECUTE_HANDLER) {}
} }
bool os::distribute_processes(uint length, uint* distribution) { bool os::distribute_processes(uint length, uint* distribution) {

View File

@ -1404,12 +1404,14 @@ static HANDLE open_sharedmem_object(const char* objectname, DWORD ofm_access, TR
objectname); /* name for object */ objectname); /* name for object */
if (fmh == NULL) { if (fmh == NULL) {
DWORD lasterror = GetLastError();
if (PrintMiscellaneous && Verbose) { if (PrintMiscellaneous && Verbose) {
warning("OpenFileMapping failed for shared memory object %s:" warning("OpenFileMapping failed for shared memory object %s:"
" lasterror = %d\n", objectname, GetLastError()); " lasterror = %d\n", objectname, lasterror);
} }
THROW_MSG_(vmSymbols::java_lang_Exception(), THROW_MSG_(vmSymbols::java_lang_IllegalArgumentException(),
"Could not open PerfMemory", INVALID_HANDLE_VALUE); err_msg("Could not open PerfMemory, error %d", lasterror),
INVALID_HANDLE_VALUE);
} }
return fmh;; return fmh;;

View File

@ -343,8 +343,15 @@ public:
#define _SC_L2CACHE_LINESZ 527 /* Size of L2 cache line */ #define _SC_L2CACHE_LINESZ 527 /* Size of L2 cache line */
#endif #endif
// Hardware capability bits that appeared after Solaris 11.1
#ifndef AV_SPARC_FMAF
#define AV_SPARC_FMAF 0x00000100 /* Fused Multiply-Add */
#endif
#ifndef AV2_SPARC_SPARC5
#define AV2_SPARC_SPARC5 0x00000008 /* The 29 new fp and sub instructions */
#endif
int VM_Version::platform_features(int features) { int VM_Version::platform_features(int features) {
assert(os::Solaris::supports_getisax(), "getisax() must be available");
// Check 32-bit architecture. // Check 32-bit architecture.
if (Sysinfo(SI_ARCHITECTURE_32).match("sparc")) { if (Sysinfo(SI_ARCHITECTURE_32).match("sparc")) {
@ -357,119 +364,75 @@ int VM_Version::platform_features(int features) {
} }
// Extract valid instruction set extensions. // Extract valid instruction set extensions.
uint_t avs[2]; uint_t avs[AV_HW2_IDX + 1];
uint_t avn = os::Solaris::getisax(avs, 2); uint_t avn = getisax(avs, ARRAY_SIZE(avs));
assert(avn <= 2, "should return two or less av's");
uint_t av = avs[0];
log_info(os, cpu)("getisax(2) returned: " PTR32_FORMAT, av); log_info(os, cpu)("getisax(2) returned %d words:", avn);
if (avn > 1) { for (int i = 0; i < avn; i++) {
log_info(os, cpu)(" " PTR32_FORMAT, avs[1]); log_info(os, cpu)(" word %d: " PTR32_FORMAT, i, avs[i]);
} }
if (av & AV_SPARC_MUL32) features |= hardware_mul32_m; uint_t av1 = avs[AV_HW1_IDX];
if (av & AV_SPARC_DIV32) features |= hardware_div32_m; if (av1 & AV_SPARC_MUL32) features |= hardware_mul32_m;
if (av & AV_SPARC_FSMULD) features |= hardware_fsmuld_m; if (av1 & AV_SPARC_DIV32) features |= hardware_div32_m;
if (av & AV_SPARC_V8PLUS) features |= v9_instructions_m; if (av1 & AV_SPARC_FSMULD) features |= hardware_fsmuld_m;
if (av & AV_SPARC_POPC) features |= hardware_popc_m; if (av1 & AV_SPARC_V8PLUS) features |= v9_instructions_m;
if (av & AV_SPARC_VIS) features |= vis1_instructions_m; if (av1 & AV_SPARC_POPC) features |= hardware_popc_m;
if (av & AV_SPARC_VIS2) features |= vis2_instructions_m; if (av1 & AV_SPARC_VIS) features |= vis1_instructions_m;
if (avn > 1) { if (av1 & AV_SPARC_VIS2) features |= vis2_instructions_m;
uint_t av2 = avs[1]; if (av1 & AV_SPARC_ASI_BLK_INIT) features |= blk_init_instructions_m;
#ifndef AV2_SPARC_SPARC5 if (av1 & AV_SPARC_FMAF) features |= fmaf_instructions_m;
#define AV2_SPARC_SPARC5 0x00000008 /* The 29 new fp and sub instructions */ if (av1 & AV_SPARC_VIS3) features |= vis3_instructions_m;
#endif if (av1 & AV_SPARC_CBCOND) features |= cbcond_instructions_m;
if (av2 & AV2_SPARC_SPARC5) features |= sparc5_instructions_m; if (av1 & AV_SPARC_CRC32C) features |= crc32c_instruction_m;
if (av1 & AV_SPARC_AES) features |= aes_instructions_m;
if (av1 & AV_SPARC_SHA1) features |= sha1_instruction_m;
if (av1 & AV_SPARC_SHA256) features |= sha256_instruction_m;
if (av1 & AV_SPARC_SHA512) features |= sha512_instruction_m;
if (avn > AV_HW2_IDX) {
uint_t av2 = avs[AV_HW2_IDX];
if (av2 & AV2_SPARC_SPARC5) features |= sparc5_instructions_m;
} }
// We only build on Solaris 10 and up, but some of the values below
// are not defined on all versions of Solaris 10, so we define them,
// if necessary.
#ifndef AV_SPARC_ASI_BLK_INIT
#define AV_SPARC_ASI_BLK_INIT 0x0080 /* ASI_BLK_INIT_xxx ASI */
#endif
if (av & AV_SPARC_ASI_BLK_INIT) features |= blk_init_instructions_m;
#ifndef AV_SPARC_FMAF
#define AV_SPARC_FMAF 0x0100 /* Fused Multiply-Add */
#endif
if (av & AV_SPARC_FMAF) features |= fmaf_instructions_m;
#ifndef AV_SPARC_FMAU
#define AV_SPARC_FMAU 0x0200 /* Unfused Multiply-Add */
#endif
if (av & AV_SPARC_FMAU) features |= fmau_instructions_m;
#ifndef AV_SPARC_VIS3
#define AV_SPARC_VIS3 0x0400 /* VIS3 instruction set extensions */
#endif
if (av & AV_SPARC_VIS3) features |= vis3_instructions_m;
#ifndef AV_SPARC_CBCOND
#define AV_SPARC_CBCOND 0x10000000 /* compare and branch instrs supported */
#endif
if (av & AV_SPARC_CBCOND) features |= cbcond_instructions_m;
#ifndef AV_SPARC_CRC32C
#define AV_SPARC_CRC32C 0x20000000 /* crc32c instruction supported */
#endif
if (av & AV_SPARC_CRC32C) features |= crc32c_instruction_m;
#ifndef AV_SPARC_AES
#define AV_SPARC_AES 0x00020000 /* aes instrs supported */
#endif
if (av & AV_SPARC_AES) features |= aes_instructions_m;
#ifndef AV_SPARC_SHA1
#define AV_SPARC_SHA1 0x00400000 /* sha1 instruction supported */
#endif
if (av & AV_SPARC_SHA1) features |= sha1_instruction_m;
#ifndef AV_SPARC_SHA256
#define AV_SPARC_SHA256 0x00800000 /* sha256 instruction supported */
#endif
if (av & AV_SPARC_SHA256) features |= sha256_instruction_m;
#ifndef AV_SPARC_SHA512
#define AV_SPARC_SHA512 0x01000000 /* sha512 instruction supported */
#endif
if (av & AV_SPARC_SHA512) features |= sha512_instruction_m;
// Determine the machine type. // Determine the machine type.
if (Sysinfo(SI_MACHINE).match("sun4v")) { if (Sysinfo(SI_MACHINE).match("sun4v")) {
features |= sun4v_m; features |= sun4v_m;
} }
bool use_solaris_12_api = false; // If SI_CPUBRAND works, that means Solaris 12 API to get the cache line sizes
Sysinfo impl(SI_CPUBRAND); // is available to us as well
if (impl.valid()) { Sysinfo cpu_info(SI_CPUBRAND);
// If SI_CPUBRAND works, that means Solaris 12 API to get the cache line sizes bool use_solaris_12_api = cpu_info.valid();
// is available to us as well const char* impl;
use_solaris_12_api = true; int impl_m = 0;
features |= parse_features(impl.value()); if (use_solaris_12_api) {
impl = cpu_info.value();
log_info(os, cpu)("Parsing CPU implementation from %s", impl);
impl_m = parse_features(impl);
} else { } else {
// Otherwise use kstat to determine the machine type. // Otherwise use kstat to determine the machine type.
kstat_ctl_t* kc = kstat_open(); kstat_ctl_t* kc = kstat_open();
kstat_t* ksp = kstat_lookup(kc, (char*)"cpu_info", -1, NULL); if (kc != NULL) {
const char* implementation; kstat_t* ksp = kstat_lookup(kc, (char*)"cpu_info", -1, NULL);
bool has_implementation = false; if (ksp != NULL) {
if (ksp != NULL) { if (kstat_read(kc, ksp, NULL) != -1 && ksp->ks_data != NULL) {
if (kstat_read(kc, ksp, NULL) != -1 && ksp->ks_data != NULL) { kstat_named_t* knm = (kstat_named_t *)ksp->ks_data;
kstat_named_t* knm = (kstat_named_t *)ksp->ks_data; for (int i = 0; i < ksp->ks_ndata; i++) {
for (int i = 0; i < ksp->ks_ndata; i++) { if (strcmp((const char*)&(knm[i].name), "implementation") == 0) {
if (strcmp((const char*)&(knm[i].name),"implementation") == 0) { impl = KSTAT_NAMED_STR_PTR(&knm[i]);
implementation = KSTAT_NAMED_STR_PTR(&knm[i]); log_info(os, cpu)("Parsing CPU implementation from %s", impl);
has_implementation = true; impl_m = parse_features(impl);
log_info(os, cpu)("cpu_info.implementation: %s", implementation); break;
features |= parse_features(implementation); }
break;
} }
} // for( }
} }
kstat_close(kc);
} }
assert(has_implementation, "unknown cpu info (changed kstat interface?)");
kstat_close(kc);
} }
assert(impl_m != 0, "Unknown CPU implementation %s", impl);
features |= impl_m;
bool is_sun4v = (features & sun4v_m) != 0; bool is_sun4v = (features & sun4v_m) != 0;
if (use_solaris_12_api && is_sun4v) { if (use_solaris_12_api && is_sun4v) {

View File

@ -153,6 +153,8 @@ void AbstractAssembler::generate_stack_overflow_check(int frame_size_in_bytes) {
void Label::add_patch_at(CodeBuffer* cb, int branch_loc) { void Label::add_patch_at(CodeBuffer* cb, int branch_loc) {
assert(_loc == -1, "Label is unbound"); assert(_loc == -1, "Label is unbound");
// Don't add patch locations during scratch emit.
if (cb->insts()->scratch_emit()) { return; }
if (_patch_index < PatchCacheSize) { if (_patch_index < PatchCacheSize) {
_patches[_patch_index] = branch_loc; _patches[_patch_index] = branch_loc;
} else { } else {

View File

@ -331,6 +331,8 @@ void CodeSection::relocate(address at, relocInfo::relocType rtype, int format, j
} }
void CodeSection::relocate(address at, RelocationHolder const& spec, int format) { void CodeSection::relocate(address at, RelocationHolder const& spec, int format) {
// Do not relocate in scratch buffers.
if (scratch_emit()) { return; }
Relocation* reloc = spec.reloc(); Relocation* reloc = spec.reloc();
relocInfo::relocType rtype = (relocInfo::relocType) reloc->type(); relocInfo::relocType rtype = (relocInfo::relocType) reloc->type();
if (rtype == relocInfo::none) return; if (rtype == relocInfo::none) return;

View File

@ -92,6 +92,7 @@ class CodeSection VALUE_OBJ_CLASS_SPEC {
address _locs_point; // last relocated position (grows upward) address _locs_point; // last relocated position (grows upward)
bool _locs_own; // did I allocate the locs myself? bool _locs_own; // did I allocate the locs myself?
bool _frozen; // no more expansion of this section bool _frozen; // no more expansion of this section
bool _scratch_emit; // Buffer is used for scratch emit, don't relocate.
char _index; // my section number (SECT_INST, etc.) char _index; // my section number (SECT_INST, etc.)
CodeBuffer* _outer; // enclosing CodeBuffer CodeBuffer* _outer; // enclosing CodeBuffer
@ -108,6 +109,7 @@ class CodeSection VALUE_OBJ_CLASS_SPEC {
_locs_point = NULL; _locs_point = NULL;
_locs_own = false; _locs_own = false;
_frozen = false; _frozen = false;
_scratch_emit = false;
debug_only(_index = (char)-1); debug_only(_index = (char)-1);
debug_only(_outer = (CodeBuffer*)badAddress); debug_only(_outer = (CodeBuffer*)badAddress);
} }
@ -166,6 +168,10 @@ class CodeSection VALUE_OBJ_CLASS_SPEC {
bool is_frozen() const { return _frozen; } bool is_frozen() const { return _frozen; }
bool has_locs() const { return _locs_end != NULL; } bool has_locs() const { return _locs_end != NULL; }
// Mark scratch buffer.
void set_scratch_emit() { _scratch_emit = true; }
bool scratch_emit() { return _scratch_emit; }
CodeBuffer* outer() const { return _outer; } CodeBuffer* outer() const { return _outer; }
// is a given address in this section? (2nd version is end-inclusive) // is a given address in this section? (2nd version is end-inclusive)

View File

@ -1493,6 +1493,21 @@ void GraphBuilder::method_return(Value x, bool ignore_return) {
// Check to see whether we are inlining. If so, Return // Check to see whether we are inlining. If so, Return
// instructions become Gotos to the continuation point. // instructions become Gotos to the continuation point.
if (continuation() != NULL) { if (continuation() != NULL) {
int invoke_bci = state()->caller_state()->bci();
if (x != NULL && !ignore_return) {
ciMethod* caller = state()->scope()->caller()->method();
Bytecodes::Code invoke_raw_bc = caller->raw_code_at_bci(invoke_bci);
if (invoke_raw_bc == Bytecodes::_invokehandle || invoke_raw_bc == Bytecodes::_invokedynamic) {
ciType* declared_ret_type = caller->get_declared_signature_at_bci(invoke_bci)->return_type();
if (declared_ret_type->is_klass() && x->exact_type() == NULL &&
x->declared_type() != declared_ret_type && declared_ret_type != compilation()->env()->Object_klass()) {
x = append(new TypeCast(declared_ret_type->as_klass(), x, copy_state_before()));
}
}
}
assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet"); assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet");
if (compilation()->env()->dtrace_method_probes()) { if (compilation()->env()->dtrace_method_probes()) {
@ -1516,7 +1531,6 @@ void GraphBuilder::method_return(Value x, bool ignore_return) {
// State at end of inlined method is the state of the caller // State at end of inlined method is the state of the caller
// without the method parameters on stack, including the // without the method parameters on stack, including the
// return value, if any, of the inlined method on operand stack. // return value, if any, of the inlined method on operand stack.
int invoke_bci = state()->caller_state()->bci();
set_state(state()->caller_state()->copy_for_parsing()); set_state(state()->caller_state()->copy_for_parsing());
if (x != NULL) { if (x != NULL) {
if (!ignore_return) { if (!ignore_return) {
@ -1929,7 +1943,7 @@ void GraphBuilder::invoke(Bytecodes::Code code) {
// number of implementors for decl_interface is 0 or 1. If // number of implementors for decl_interface is 0 or 1. If
// it's 0 then no class implements decl_interface and there's // it's 0 then no class implements decl_interface and there's
// no point in inlining. // no point in inlining.
if (!holder->is_loaded() || decl_interface->nof_implementors() != 1 || decl_interface->has_default_methods()) { if (!holder->is_loaded() || decl_interface->nof_implementors() != 1 || decl_interface->has_nonstatic_concrete_methods()) {
singleton = NULL; singleton = NULL;
} }
} }
@ -4308,7 +4322,7 @@ void GraphBuilder::print_stats() {
void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) { void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
assert(known_holder == NULL || (known_holder->is_instance_klass() && assert(known_holder == NULL || (known_holder->is_instance_klass() &&
(!known_holder->is_interface() || (!known_holder->is_interface() ||
((ciInstanceKlass*)known_holder)->has_default_methods())), "should be default method"); ((ciInstanceKlass*)known_holder)->has_nonstatic_concrete_methods())), "should be non-static concrete method");
if (known_holder != NULL) { if (known_holder != NULL) {
if (known_holder->exact_klass() == NULL) { if (known_holder->exact_klass() == NULL) {
known_holder = compilation()->cha_exact_type(known_holder); known_holder = compilation()->cha_exact_type(known_holder);

View File

@ -360,7 +360,8 @@ void Invoke::state_values_do(ValueVisitor* f) {
} }
ciType* Invoke::declared_type() const { ciType* Invoke::declared_type() const {
ciType *t = _target->signature()->return_type(); ciSignature* declared_signature = state()->scope()->method()->get_declared_signature_at_bci(state()->bci());
ciType *t = declared_signature->return_type();
assert(t->basic_type() != T_VOID, "need return value of void method?"); assert(t->basic_type() != T_VOID, "need return value of void method?");
return t; return t;
} }

View File

@ -58,7 +58,7 @@ ciInstanceKlass::ciInstanceKlass(KlassHandle h_k) :
_init_state = ik->init_state(); _init_state = ik->init_state();
_nonstatic_field_size = ik->nonstatic_field_size(); _nonstatic_field_size = ik->nonstatic_field_size();
_has_nonstatic_fields = ik->has_nonstatic_fields(); _has_nonstatic_fields = ik->has_nonstatic_fields();
_has_default_methods = ik->has_default_methods(); _has_nonstatic_concrete_methods = ik->has_nonstatic_concrete_methods();
_is_anonymous = ik->is_anonymous(); _is_anonymous = ik->is_anonymous();
_nonstatic_fields = NULL; // initialized lazily by compute_nonstatic_fields: _nonstatic_fields = NULL; // initialized lazily by compute_nonstatic_fields:
_has_injected_fields = -1; _has_injected_fields = -1;

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 1999, 2015, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -52,7 +52,7 @@ private:
bool _has_finalizer; bool _has_finalizer;
bool _has_subklass; bool _has_subklass;
bool _has_nonstatic_fields; bool _has_nonstatic_fields;
bool _has_default_methods; bool _has_nonstatic_concrete_methods;
bool _is_anonymous; bool _is_anonymous;
ciFlags _flags; ciFlags _flags;
@ -174,9 +174,9 @@ public:
return 2; return 2;
} }
} }
bool has_default_methods() { bool has_nonstatic_concrete_methods() {
assert(is_loaded(), "must be loaded"); assert(is_loaded(), "must be loaded");
return _has_default_methods; return _has_nonstatic_concrete_methods;
} }
bool is_anonymous() { bool is_anonymous() {

View File

@ -256,6 +256,14 @@ class ciMethod : public ciMetadata {
return get_method_at_bci(bci, ignored_will_link, &ignored_declared_signature); return get_method_at_bci(bci, ignored_will_link, &ignored_declared_signature);
} }
ciSignature* get_declared_signature_at_bci(int bci) {
bool ignored_will_link;
ciSignature* declared_signature;
get_method_at_bci(bci, ignored_will_link, &declared_signature);
assert(declared_signature != NULL, "cannot be null");
return declared_signature;
}
// Given a certain calling environment, find the monomorphic target // Given a certain calling environment, find the monomorphic target
// for the call. Return NULL if the call is not monomorphic in // for the call. Return NULL if the call is not monomorphic in
// its calling environment. // its calling environment.

View File

@ -798,11 +798,11 @@ static bool put_after_lookup(const Symbol* name, const Symbol* sig, NameSigHash*
void ClassFileParser::parse_interfaces(const ClassFileStream* const stream, void ClassFileParser::parse_interfaces(const ClassFileStream* const stream,
const int itfs_len, const int itfs_len,
ConstantPool* const cp, ConstantPool* const cp,
bool* const has_default_methods, bool* const has_nonstatic_concrete_methods,
TRAPS) { TRAPS) {
assert(stream != NULL, "invariant"); assert(stream != NULL, "invariant");
assert(cp != NULL, "invariant"); assert(cp != NULL, "invariant");
assert(has_default_methods != NULL, "invariant"); assert(has_nonstatic_concrete_methods != NULL, "invariant");
if (itfs_len == 0) { if (itfs_len == 0) {
_local_interfaces = Universe::the_empty_klass_array(); _local_interfaces = Universe::the_empty_klass_array();
@ -844,8 +844,8 @@ void ClassFileParser::parse_interfaces(const ClassFileStream* const stream,
"Implementing class"); "Implementing class");
} }
if (InstanceKlass::cast(interf())->has_default_methods()) { if (InstanceKlass::cast(interf())->has_nonstatic_concrete_methods()) {
*has_default_methods = true; *has_nonstatic_concrete_methods = true;
} }
_local_interfaces->at_put(index, interf()); _local_interfaces->at_put(index, interf());
} }
@ -2830,12 +2830,12 @@ void ClassFileParser::parse_methods(const ClassFileStream* const cfs,
bool is_interface, bool is_interface,
AccessFlags* promoted_flags, AccessFlags* promoted_flags,
bool* has_final_method, bool* has_final_method,
bool* declares_default_methods, bool* declares_nonstatic_concrete_methods,
TRAPS) { TRAPS) {
assert(cfs != NULL, "invariant"); assert(cfs != NULL, "invariant");
assert(promoted_flags != NULL, "invariant"); assert(promoted_flags != NULL, "invariant");
assert(has_final_method != NULL, "invariant"); assert(has_final_method != NULL, "invariant");
assert(declares_default_methods != NULL, "invariant"); assert(declares_nonstatic_concrete_methods != NULL, "invariant");
assert(NULL == _methods, "invariant"); assert(NULL == _methods, "invariant");
@ -2860,11 +2860,11 @@ void ClassFileParser::parse_methods(const ClassFileStream* const cfs,
if (method->is_final()) { if (method->is_final()) {
*has_final_method = true; *has_final_method = true;
} }
// declares_default_methods: declares concrete instance methods, any access flags // declares_nonstatic_concrete_methods: declares concrete instance methods, any access flags
// used for interface initialization, and default method inheritance analysis // used for interface initialization, and default method inheritance analysis
if (is_interface && !(*declares_default_methods) if (is_interface && !(*declares_nonstatic_concrete_methods)
&& !method->is_abstract() && !method->is_static()) { && !method->is_abstract() && !method->is_static()) {
*declares_default_methods = true; *declares_nonstatic_concrete_methods = true;
} }
_methods->at_put(index, method); _methods->at_put(index, method);
} }
@ -5250,8 +5250,8 @@ void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loa
ik->set_minor_version(_minor_version); ik->set_minor_version(_minor_version);
ik->set_major_version(_major_version); ik->set_major_version(_major_version);
ik->set_has_default_methods(_has_default_methods); ik->set_has_nonstatic_concrete_methods(_has_nonstatic_concrete_methods);
ik->set_declares_default_methods(_declares_default_methods); ik->set_declares_nonstatic_concrete_methods(_declares_nonstatic_concrete_methods);
if (_host_klass != NULL) { if (_host_klass != NULL) {
assert (ik->is_anonymous(), "should be the same"); assert (ik->is_anonymous(), "should be the same");
@ -5311,12 +5311,9 @@ void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loa
// check if this class overrides any final method // check if this class overrides any final method
check_final_method_override(ik, CHECK); check_final_method_override(ik, CHECK);
// check that if this class is an interface then it doesn't have static methods // reject static interface methods prior to Java 8
if (ik->is_interface()) { if (ik->is_interface() && _major_version < JAVA_8_VERSION) {
/* An interface in a JAVA 8 classfile can be static */ check_illegal_static_method(ik, CHECK);
if (_major_version < JAVA_8_VERSION) {
check_illegal_static_method(ik, CHECK);
}
} }
// Obtain this_klass' module entry // Obtain this_klass' module entry
@ -5336,9 +5333,9 @@ void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loa
assert(_all_mirandas != NULL, "invariant"); assert(_all_mirandas != NULL, "invariant");
// Generate any default methods - default methods are interface methods // Generate any default methods - default methods are public interface methods
// that have a default implementation. This is new with Lambda project. // that have a default implementation. This is new with Java 8.
if (_has_default_methods ) { if (_has_nonstatic_concrete_methods) {
DefaultMethods::generate_default_methods(ik, DefaultMethods::generate_default_methods(ik,
_all_mirandas, _all_mirandas,
CHECK); CHECK);
@ -5523,8 +5520,8 @@ ClassFileParser::ClassFileParser(ClassFileStream* stream,
_java_fields_count(0), _java_fields_count(0),
_need_verify(false), _need_verify(false),
_relax_verify(false), _relax_verify(false),
_has_default_methods(false), _has_nonstatic_concrete_methods(false),
_declares_default_methods(false), _declares_nonstatic_concrete_methods(false),
_has_final_method(false), _has_final_method(false),
_has_finalizer(false), _has_finalizer(false),
_has_empty_finalizer(false), _has_empty_finalizer(false),
@ -5778,9 +5775,22 @@ void ClassFileParser::parse_stream(const ClassFileStream* const stream,
// Anonymous classes such as generated LambdaForm classes are also not included. // Anonymous classes such as generated LambdaForm classes are also not included.
if (SystemDictionaryShared::is_sharing_possible(_loader_data) && if (SystemDictionaryShared::is_sharing_possible(_loader_data) &&
_host_klass == NULL) { _host_klass == NULL) {
oop class_loader = _loader_data->class_loader();
ResourceMark rm(THREAD); ResourceMark rm(THREAD);
classlist_file->print_cr("%s", _class_name->as_C_string()); // For the boot and platform class loaders, check if the class is not found in the
classlist_file->flush(); // java runtime image. For the boot class loader, additionally check that the class
// is not found in the boot loader's appended entries. This indicates that the class
// is not usable at run time, such as the ones found in the --patch-module entries,
// so it should not be included in the classlist file.
if (((class_loader == NULL && !ClassLoader::contains_append_entry(stream->source())) ||
SystemDictionary::is_platform_class_loader(class_loader)) &&
!ClassLoader::is_jrt(stream->source())) {
tty->print_cr("skip writing class %s from source %s to classlist file",
_class_name->as_C_string(), stream->source());
} else {
classlist_file->print_cr("%s", _class_name->as_C_string());
classlist_file->flush();
}
} }
} }
#endif #endif
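For readability, here is a minimal standalone sketch of the decision the new classlist filter encodes; the function name and boolean parameters are hypothetical stand-ins for the real checks made through ClassLoader::contains_append_entry(), SystemDictionary::is_platform_class_loader() and ClassLoader::is_jrt():

    // Sketch only: should a just-parsed class be written to the classlist file?
    static bool should_record_in_classlist(bool is_boot_loader,
                                           bool is_platform_loader,
                                           bool source_is_jrt,
                                           bool source_on_append_path) {
      if (source_is_jrt) {
        return true;                  // classes from the runtime image are always usable
      }
      if (is_boot_loader) {
        return source_on_append_path; // -Xbootclasspath/a entries are still usable
      }
      if (is_platform_loader) {
        return false;                 // e.g. --patch-module classes: skip
      }
      return true;                    // other loaders are not filtered here
    }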
@ -5798,7 +5808,7 @@ void ClassFileParser::parse_stream(const ClassFileStream* const stream,
parse_interfaces(stream, parse_interfaces(stream,
_itfs_len, _itfs_len,
cp, cp,
&_has_default_methods, &_has_nonstatic_concrete_methods,
CHECK); CHECK);
assert(_local_interfaces != NULL, "invariant"); assert(_local_interfaces != NULL, "invariant");
@ -5821,7 +5831,7 @@ void ClassFileParser::parse_stream(const ClassFileStream* const stream,
_access_flags.is_interface(), _access_flags.is_interface(),
&promoted_flags, &promoted_flags,
&_has_final_method, &_has_final_method,
&_declares_default_methods, &_declares_nonstatic_concrete_methods,
CHECK); CHECK);
assert(_methods != NULL, "invariant"); assert(_methods != NULL, "invariant");
@ -5829,8 +5839,8 @@ void ClassFileParser::parse_stream(const ClassFileStream* const stream,
// promote flags from parse_methods() to the klass' flags // promote flags from parse_methods() to the klass' flags
_access_flags.add_promoted_flags(promoted_flags.as_int()); _access_flags.add_promoted_flags(promoted_flags.as_int());
if (_declares_default_methods) { if (_declares_nonstatic_concrete_methods) {
_has_default_methods = true; _has_nonstatic_concrete_methods = true;
} }
// Additional attributes/annotations // Additional attributes/annotations
@ -5884,8 +5894,8 @@ void ClassFileParser::post_process_parsed_stream(const ClassFileStream* const st
} }
if (_super_klass != NULL) { if (_super_klass != NULL) {
if (_super_klass->has_default_methods()) { if (_super_klass->has_nonstatic_concrete_methods()) {
_has_default_methods = true; _has_nonstatic_concrete_methods = true;
} }
if (_super_klass->is_interface()) { if (_super_klass->is_interface()) {

View File

@ -139,8 +139,8 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
bool _need_verify; bool _need_verify;
bool _relax_verify; bool _relax_verify;
bool _has_default_methods; bool _has_nonstatic_concrete_methods;
bool _declares_default_methods; bool _declares_nonstatic_concrete_methods;
bool _has_final_method; bool _has_final_method;
// precomputed flags // precomputed flags
@ -186,7 +186,7 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
void parse_interfaces(const ClassFileStream* const stream, void parse_interfaces(const ClassFileStream* const stream,
const int itfs_len, const int itfs_len,
ConstantPool* const cp, ConstantPool* const cp,
bool* has_default_methods, bool* has_nonstatic_concrete_methods,
TRAPS); TRAPS);
const InstanceKlass* parse_super_class(ConstantPool* const cp, const InstanceKlass* parse_super_class(ConstantPool* const cp,
@ -224,7 +224,7 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
bool is_interface, bool is_interface,
AccessFlags* const promoted_flags, AccessFlags* const promoted_flags,
bool* const has_final_method, bool* const has_final_method,
bool* const declares_default_methods, bool* const declares_nonstatic_concrete_methods,
TRAPS); TRAPS);
const u2* parse_exception_table(const ClassFileStream* const stream, const u2* parse_exception_table(const ClassFileStream* const stream,

View File

@ -81,7 +81,6 @@ typedef void * * (JNICALL *ZipOpen_t)(const char *name, char **pmsg);
typedef void (JNICALL *ZipClose_t)(jzfile *zip); typedef void (JNICALL *ZipClose_t)(jzfile *zip);
typedef jzentry* (JNICALL *FindEntry_t)(jzfile *zip, const char *name, jint *sizeP, jint *nameLen); typedef jzentry* (JNICALL *FindEntry_t)(jzfile *zip, const char *name, jint *sizeP, jint *nameLen);
typedef jboolean (JNICALL *ReadEntry_t)(jzfile *zip, jzentry *entry, unsigned char *buf, char *namebuf); typedef jboolean (JNICALL *ReadEntry_t)(jzfile *zip, jzentry *entry, unsigned char *buf, char *namebuf);
typedef jboolean (JNICALL *ReadMappedEntry_t)(jzfile *zip, jzentry *entry, unsigned char **buf, char *namebuf);
typedef jzentry* (JNICALL *GetNextEntry_t)(jzfile *zip, jint n); typedef jzentry* (JNICALL *GetNextEntry_t)(jzfile *zip, jint n);
typedef jboolean (JNICALL *ZipInflateFully_t)(void *inBuf, jlong inLen, void *outBuf, jlong outLen, char **pmsg); typedef jboolean (JNICALL *ZipInflateFully_t)(void *inBuf, jlong inLen, void *outBuf, jlong outLen, char **pmsg);
typedef jint (JNICALL *Crc32_t)(jint crc, const jbyte *buf, jint len); typedef jint (JNICALL *Crc32_t)(jint crc, const jbyte *buf, jint len);
@ -91,7 +90,6 @@ static ZipOpen_t ZipOpen = NULL;
static ZipClose_t ZipClose = NULL; static ZipClose_t ZipClose = NULL;
static FindEntry_t FindEntry = NULL; static FindEntry_t FindEntry = NULL;
static ReadEntry_t ReadEntry = NULL; static ReadEntry_t ReadEntry = NULL;
static ReadMappedEntry_t ReadMappedEntry = NULL;
static GetNextEntry_t GetNextEntry = NULL; static GetNextEntry_t GetNextEntry = NULL;
static canonicalize_fn_t CanonicalizeEntry = NULL; static canonicalize_fn_t CanonicalizeEntry = NULL;
static ZipInflateFully_t ZipInflateFully = NULL; static ZipInflateFully_t ZipInflateFully = NULL;
@ -353,15 +351,10 @@ u1* ClassPathZipEntry::open_entry(const char* name, jint* filesize, bool nul_ter
filename = NEW_RESOURCE_ARRAY(char, name_len + 1); filename = NEW_RESOURCE_ARRAY(char, name_len + 1);
} }
// file found, get pointer to the entry in mmapped jar file. // read contents into resource array
if (ReadMappedEntry == NULL || int size = (*filesize) + ((nul_terminate) ? 1 : 0);
!(*ReadMappedEntry)(_zip, entry, &buffer, filename)) { buffer = NEW_RESOURCE_ARRAY(u1, size);
// mmapped access not available, perhaps due to compression, if (!(*ReadEntry)(_zip, entry, buffer, filename)) return NULL;
// read contents into resource array
int size = (*filesize) + ((nul_terminate) ? 1 : 0);
buffer = NEW_RESOURCE_ARRAY(u1, size);
if (!(*ReadEntry)(_zip, entry, buffer, filename)) return NULL;
}
// return result // return result
if (nul_terminate) { if (nul_terminate) {
@ -952,11 +945,11 @@ ClassPathZipEntry* ClassLoader::create_class_path_zip_entry(const char *path, bo
} }
// returns true if entry already on class path // returns true if entry already on class path
bool ClassLoader::contains_entry(ClassPathEntry *entry) { bool ClassLoader::contains_append_entry(const char* name) {
ClassPathEntry* e = _first_append_entry; ClassPathEntry* e = _first_append_entry;
while (e != NULL) { while (e != NULL) {
// assume zip entries have been canonicalized // assume zip entries have been canonicalized
if (strcmp(entry->name(), e->name()) == 0) { if (strcmp(name, e->name()) == 0) {
return true; return true;
} }
e = e->next(); e = e->next();
@ -998,7 +991,7 @@ bool ClassLoader::update_class_path_entry_list(const char *path,
// Do not reorder the bootclasspath which would break get_system_package(). // Do not reorder the bootclasspath which would break get_system_package().
// Add new entry to linked list // Add new entry to linked list
if (!check_for_duplicates || !contains_entry(new_entry)) { if (!check_for_duplicates || !contains_append_entry(new_entry->name())) {
ClassLoaderExt::add_class_path_entry(path, check_for_duplicates, new_entry); ClassLoaderExt::add_class_path_entry(path, check_for_duplicates, new_entry);
} }
return true; return true;
@ -1079,7 +1072,6 @@ void ClassLoader::load_zip_library() {
ZipClose = CAST_TO_FN_PTR(ZipClose_t, os::dll_lookup(handle, "ZIP_Close")); ZipClose = CAST_TO_FN_PTR(ZipClose_t, os::dll_lookup(handle, "ZIP_Close"));
FindEntry = CAST_TO_FN_PTR(FindEntry_t, os::dll_lookup(handle, "ZIP_FindEntry")); FindEntry = CAST_TO_FN_PTR(FindEntry_t, os::dll_lookup(handle, "ZIP_FindEntry"));
ReadEntry = CAST_TO_FN_PTR(ReadEntry_t, os::dll_lookup(handle, "ZIP_ReadEntry")); ReadEntry = CAST_TO_FN_PTR(ReadEntry_t, os::dll_lookup(handle, "ZIP_ReadEntry"));
ReadMappedEntry = CAST_TO_FN_PTR(ReadMappedEntry_t, os::dll_lookup(handle, "ZIP_ReadMappedEntry"));
GetNextEntry = CAST_TO_FN_PTR(GetNextEntry_t, os::dll_lookup(handle, "ZIP_GetNextEntry")); GetNextEntry = CAST_TO_FN_PTR(GetNextEntry_t, os::dll_lookup(handle, "ZIP_GetNextEntry"));
ZipInflateFully = CAST_TO_FN_PTR(ZipInflateFully_t, os::dll_lookup(handle, "ZIP_InflateFully")); ZipInflateFully = CAST_TO_FN_PTR(ZipInflateFully_t, os::dll_lookup(handle, "ZIP_InflateFully"));
Crc32 = CAST_TO_FN_PTR(Crc32_t, os::dll_lookup(handle, "ZIP_CRC32")); Crc32 = CAST_TO_FN_PTR(Crc32_t, os::dll_lookup(handle, "ZIP_CRC32"));
@ -2049,7 +2041,6 @@ void ClassLoader::compile_the_world_in(char* name, Handle loader, TRAPS) {
if (nm != NULL && !m->is_method_handle_intrinsic()) { if (nm != NULL && !m->is_method_handle_intrinsic()) {
// Throw out the code so that the code cache doesn't fill up // Throw out the code so that the code cache doesn't fill up
nm->make_not_entrant(); nm->make_not_entrant();
m->clear_code();
} }
CompileBroker::compile_method(m, InvocationEntryBci, CompLevel_full_optimization, CompileBroker::compile_method(m, InvocationEntryBci, CompLevel_full_optimization,
methodHandle(), 0, CompileTask::Reason_CTW, THREAD); methodHandle(), 0, CompileTask::Reason_CTW, THREAD);
@ -2068,7 +2059,6 @@ void ClassLoader::compile_the_world_in(char* name, Handle loader, TRAPS) {
if (nm != NULL && !m->is_method_handle_intrinsic()) { if (nm != NULL && !m->is_method_handle_intrinsic()) {
// Throw out the code so that the code cache doesn't fill up // Throw out the code so that the code cache doesn't fill up
nm->make_not_entrant(); nm->make_not_entrant();
m->clear_code();
} }
} }
} }

View File

@ -451,7 +451,7 @@ class ClassLoader: AllStatic {
static void set_first_append_entry(ClassPathEntry* entry); static void set_first_append_entry(ClassPathEntry* entry);
// indicates if class path already contains an entry (exact match by name) // indicates if class path already contains an entry (exact match by name)
static bool contains_entry(ClassPathEntry* entry); static bool contains_append_entry(const char* name);
// adds a class path list // adds a class path list
static void add_to_list(ClassPathEntry* new_entry); static void add_to_list(ClassPathEntry* new_entry);

View File

@ -639,7 +639,6 @@ const char* ClassLoaderData::loader_name() {
#undef CLD_DUMP_KLASSES #undef CLD_DUMP_KLASSES
void ClassLoaderData::dump(outputStream * const out) { void ClassLoaderData::dump(outputStream * const out) {
ResourceMark rm;
out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {", out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
p2i(this), p2i((void *)class_loader()), p2i(this), p2i((void *)class_loader()),
p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name()); p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
@ -656,7 +655,6 @@ void ClassLoaderData::dump(outputStream * const out) {
#ifdef CLD_DUMP_KLASSES #ifdef CLD_DUMP_KLASSES
if (Verbose) { if (Verbose) {
ResourceMark rm;
Klass* k = _klasses; Klass* k = _klasses;
while (k != NULL) { while (k != NULL) {
out->print_cr("klass " PTR_FORMAT ", %s, CT: %d, MUT: %d", k, k->name()->as_C_string(), out->print_cr("klass " PTR_FORMAT ", %s, CT: %d, MUT: %d", k, k->name()->as_C_string(),

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -914,7 +914,7 @@ static void create_defaults_and_exceptions(
BytecodeBuffer buffer; BytecodeBuffer buffer;
if (log_is_enabled(Debug, defaultmethods)) { if (log_is_enabled(Debug, defaultmethods)) {
ResourceMark rm; ResourceMark rm(THREAD);
outputStream* logstream = Log(defaultmethods)::debug_stream(); outputStream* logstream = Log(defaultmethods)::debug_stream();
logstream->print("for slot: "); logstream->print("for slot: ");
slot->print_on(logstream); slot->print_on(logstream);
@ -929,6 +929,7 @@ static void create_defaults_and_exceptions(
if (method->has_target()) { if (method->has_target()) {
Method* selected = method->get_selected_target(); Method* selected = method->get_selected_target();
if (selected->method_holder()->is_interface()) { if (selected->method_holder()->is_interface()) {
assert(!selected->is_private(), "pushing private interface method as default");
defaults.push(selected); defaults.push(selected);
} }
} else if (method->throws_exception()) { } else if (method->throws_exception()) {

View File

@ -780,19 +780,26 @@ void java_lang_Class::set_mirror_module_field(KlassHandle k, Handle mirror, Hand
// Put the class on the fixup_module_list to patch later when the java.lang.reflect.Module // Put the class on the fixup_module_list to patch later when the java.lang.reflect.Module
// for java.base is known. // for java.base is known.
assert(!Universe::is_module_initialized(), "Incorrect java.lang.reflect.Module pre module system initialization"); assert(!Universe::is_module_initialized(), "Incorrect java.lang.reflect.Module pre module system initialization");
MutexLocker m1(Module_lock, THREAD);
// Keep list of classes needing java.base module fixup bool javabase_was_defined = false;
if (!ModuleEntryTable::javabase_defined()) { {
if (fixup_module_field_list() == NULL) { MutexLocker m1(Module_lock, THREAD);
GrowableArray<Klass*>* list = // Keep list of classes needing java.base module fixup
new (ResourceObj::C_HEAP, mtModule) GrowableArray<Klass*>(500, true); if (!ModuleEntryTable::javabase_defined()) {
set_fixup_module_field_list(list); if (fixup_module_field_list() == NULL) {
GrowableArray<Klass*>* list =
new (ResourceObj::C_HEAP, mtModule) GrowableArray<Klass*>(500, true);
set_fixup_module_field_list(list);
}
k->class_loader_data()->inc_keep_alive();
fixup_module_field_list()->push(k());
} else {
javabase_was_defined = true;
} }
k->class_loader_data()->inc_keep_alive(); }
fixup_module_field_list()->push(k());
} else { // If java.base was already defined then patch this particular class with java.base.
// java.base was defined at some point between calling create_mirror() if (javabase_was_defined) {
// and obtaining the Module_lock, patch this particular class with java.base.
ModuleEntry *javabase_entry = ModuleEntryTable::javabase_moduleEntry(); ModuleEntry *javabase_entry = ModuleEntryTable::javabase_moduleEntry();
assert(javabase_entry != NULL && javabase_entry->module() != NULL, assert(javabase_entry != NULL && javabase_entry->module() != NULL,
"Setting class module field, java.base should be defined"); "Setting class module field, java.base should be defined");

View File

@ -74,7 +74,7 @@ instanceKlassHandle KlassFactory::check_shared_class_file_load_hook(
(SharedClassPathEntry*)FileMapInfo::shared_classpath(path_index); (SharedClassPathEntry*)FileMapInfo::shared_classpath(path_index);
ClassFileStream* stream = new ClassFileStream(ptr, ClassFileStream* stream = new ClassFileStream(ptr,
end_ptr - ptr, end_ptr - ptr,
ent->_name, ent == NULL ? NULL : ent->_name,
ClassFileStream::verify); ClassFileStream::verify);
ClassFileParser parser(stream, ClassFileParser parser(stream,
class_name, class_name,

View File

@ -368,9 +368,6 @@ void ModuleEntryTable::finalize_javabase(Handle module_handle, Symbol* version,
// Store pointer to the ModuleEntry for java.base in the java.lang.reflect.Module object. // Store pointer to the ModuleEntry for java.base in the java.lang.reflect.Module object.
java_lang_reflect_Module::set_module_entry(module_handle(), jb_module); java_lang_reflect_Module::set_module_entry(module_handle(), jb_module);
// Patch any previously loaded classes' module field with java.base's java.lang.reflect.Module.
patch_javabase_entries(module_handle);
} }
// Within java.lang.Class instances there is a java.lang.reflect.Module field // Within java.lang.Class instances there is a java.lang.reflect.Module field
@ -378,7 +375,6 @@ void ModuleEntryTable::finalize_javabase(Handle module_handle, Symbol* version,
// definition, classes needing their module field set are added to the fixup_module_list. // definition, classes needing their module field set are added to the fixup_module_list.
// Their module field is set once java.base's java.lang.reflect.Module is known to the VM. // Their module field is set once java.base's java.lang.reflect.Module is known to the VM.
void ModuleEntryTable::patch_javabase_entries(Handle module_handle) { void ModuleEntryTable::patch_javabase_entries(Handle module_handle) {
assert(Module_lock->owned_by_self(), "should have the Module_lock");
if (module_handle.is_null()) { if (module_handle.is_null()) {
fatal("Unable to patch the module field of classes loaded prior to java.base's definition, invalid java.lang.reflect.Module"); fatal("Unable to patch the module field of classes loaded prior to java.base's definition, invalid java.lang.reflect.Module");
} }

View File

@ -244,6 +244,12 @@ static void define_javabase_module(jobject module, jstring version,
"Module java.base is already defined"); "Module java.base is already defined");
} }
// Only the thread that actually defined the base module will get here,
// so no locking is needed.
// Patch any previously loaded class's module field with java.base's java.lang.reflect.Module.
ModuleEntryTable::patch_javabase_entries(module_handle);
log_debug(modules)("define_javabase_module(): Definition of module: java.base," log_debug(modules)("define_javabase_module(): Definition of module: java.base,"
" version: %s, location: %s, package #: %d", " version: %s, location: %s, package #: %d",
module_version != NULL ? module_version : "NULL", module_version != NULL ? module_version : "NULL",
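Since patch_javabase_entries() is now invoked from define_javabase_module() rather than under the Module_lock in finalize_javabase(), the intended ordering is: publish java.base first, then let the single defining thread walk the fixup list. A rough conceptual sketch only, with the actual module-field store replaced by a comment; apart from fixup_module_field_list() and inc_keep_alive()/dec_keep_alive(), the names are simplified stand-ins:

    // Conceptual sketch of the new ordering; not the real implementation.
    void define_javabase_sketch(Handle javabase_handle) {
      {
        MutexLocker ml(Module_lock);
        // ... create and publish the java.base ModuleEntry ...
      }
      // Only the thread that defined java.base reaches this point, so the fixup
      // list can be walked without holding Module_lock.
      GrowableArray<Klass*>* list = java_lang_Class::fixup_module_field_list();
      for (int i = 0; list != NULL && i < list->length(); i++) {
        Klass* k = list->at(i);
        // store java.base's java.lang.reflect.Module into k's mirror here
        k->class_loader_data()->dec_keep_alive(); // balances inc_keep_alive() at enqueue time
      }
    }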

View File

@ -1234,7 +1234,7 @@ bool SystemDictionary::is_shared_class_visible(Symbol* class_name,
SharedClassPathEntry* ent = SharedClassPathEntry* ent =
(SharedClassPathEntry*)FileMapInfo::shared_classpath(path_index); (SharedClassPathEntry*)FileMapInfo::shared_classpath(path_index);
if (!Universe::is_module_initialized()) { if (!Universe::is_module_initialized()) {
assert(ent->is_jrt(), assert(ent != NULL && ent->is_jrt(),
"Loading non-bootstrap classes before the module system is initialized"); "Loading non-bootstrap classes before the module system is initialized");
assert(class_loader.is_null(), "sanity"); assert(class_loader.is_null(), "sanity");
return true; return true;
@ -1257,6 +1257,7 @@ bool SystemDictionary::is_shared_class_visible(Symbol* class_name,
} }
if (class_loader.is_null()) { if (class_loader.is_null()) {
assert(ent != NULL, "Shared class for NULL classloader must have valid SharedClassPathEntry");
// The NULL classloader can load archived class originated from the // The NULL classloader can load archived class originated from the
// "modules" jimage and the -Xbootclasspath/a. For class from the // "modules" jimage and the -Xbootclasspath/a. For class from the
// "modules" jimage, the PackageEntry/ModuleEntry must be defined // "modules" jimage, the PackageEntry/ModuleEntry must be defined

View File

@ -226,7 +226,7 @@ class SystemDictionary : AllStatic {
WKID_LIMIT, WKID_LIMIT,
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
FIRST_JVMCI_WKID = WK_KLASS_ENUM_NAME(HotSpotCompiledCode_klass), FIRST_JVMCI_WKID = WK_KLASS_ENUM_NAME(JVMCI_klass),
LAST_JVMCI_WKID = WK_KLASS_ENUM_NAME(Value_klass), LAST_JVMCI_WKID = WK_KLASS_ENUM_NAME(Value_klass),
#endif #endif

View File

@ -1252,7 +1252,7 @@ bool nmethod::make_not_entrant_or_zombie(unsigned int state) {
if (method() != NULL && (method()->code() == this || if (method() != NULL && (method()->code() == this ||
method()->from_compiled_entry() == verified_entry_point())) { method()->from_compiled_entry() == verified_entry_point())) {
HandleMark hm; HandleMark hm;
method()->clear_code(); method()->clear_code(false /* already owns Patching_lock */);
} }
} // leave critical region under Patching_lock } // leave critical region under Patching_lock

View File

@ -2340,13 +2340,11 @@ void CMSCollector::verify_after_remark_work_1() {
{ {
StrongRootsScope srs(1); StrongRootsScope srs(1);
gch->gen_process_roots(&srs, gch->cms_process_roots(&srs,
GenCollectedHeap::OldGen,
true, // young gen as roots true, // young gen as roots
GenCollectedHeap::ScanningOption(roots_scanning_options()), GenCollectedHeap::ScanningOption(roots_scanning_options()),
should_unload_classes(), should_unload_classes(),
&notOlder, &notOlder,
NULL,
NULL); NULL);
} }
@ -2414,13 +2412,11 @@ void CMSCollector::verify_after_remark_work_2() {
{ {
StrongRootsScope srs(1); StrongRootsScope srs(1);
gch->gen_process_roots(&srs, gch->cms_process_roots(&srs,
GenCollectedHeap::OldGen,
true, // young gen as roots true, // young gen as roots
GenCollectedHeap::ScanningOption(roots_scanning_options()), GenCollectedHeap::ScanningOption(roots_scanning_options()),
should_unload_classes(), should_unload_classes(),
&notOlder, &notOlder,
NULL,
&cld_closure); &cld_closure);
} }
@ -2903,13 +2899,11 @@ void CMSCollector::checkpointRootsInitialWork() {
StrongRootsScope srs(1); StrongRootsScope srs(1);
gch->gen_process_roots(&srs, gch->cms_process_roots(&srs,
GenCollectedHeap::OldGen,
true, // young gen as roots true, // young gen as roots
GenCollectedHeap::ScanningOption(roots_scanning_options()), GenCollectedHeap::ScanningOption(roots_scanning_options()),
should_unload_classes(), should_unload_classes(),
&notOlder, &notOlder,
NULL,
&cld_closure); &cld_closure);
} }
} }
@ -4290,13 +4284,11 @@ void CMSParInitialMarkTask::work(uint worker_id) {
CLDToOopClosure cld_closure(&par_mri_cl, true); CLDToOopClosure cld_closure(&par_mri_cl, true);
gch->gen_process_roots(_strong_roots_scope, gch->cms_process_roots(_strong_roots_scope,
GenCollectedHeap::OldGen,
false, // yg was scanned above false, // yg was scanned above
GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()), GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
_collector->should_unload_classes(), _collector->should_unload_classes(),
&par_mri_cl, &par_mri_cl,
NULL,
&cld_closure); &cld_closure);
assert(_collector->should_unload_classes() assert(_collector->should_unload_classes()
|| (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache), || (_collector->CMSCollector::roots_scanning_options() & GenCollectedHeap::SO_AllCodeCache),
@ -4421,13 +4413,11 @@ void CMSParRemarkTask::work(uint worker_id) {
// ---------- remaining roots -------------- // ---------- remaining roots --------------
_timer.reset(); _timer.reset();
_timer.start(); _timer.start();
gch->gen_process_roots(_strong_roots_scope, gch->cms_process_roots(_strong_roots_scope,
GenCollectedHeap::OldGen,
false, // yg was scanned above false, // yg was scanned above
GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()), GenCollectedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
_collector->should_unload_classes(), _collector->should_unload_classes(),
&par_mrias_cl, &par_mrias_cl,
NULL,
NULL); // The dirty klasses will be handled below NULL); // The dirty klasses will be handled below
assert(_collector->should_unload_classes() assert(_collector->should_unload_classes()
@ -4970,13 +4960,11 @@ void CMSCollector::do_remark_non_parallel() {
gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel. gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
StrongRootsScope srs(1); StrongRootsScope srs(1);
gch->gen_process_roots(&srs, gch->cms_process_roots(&srs,
GenCollectedHeap::OldGen,
true, // young gen as roots true, // young gen as roots
GenCollectedHeap::ScanningOption(roots_scanning_options()), GenCollectedHeap::ScanningOption(roots_scanning_options()),
should_unload_classes(), should_unload_classes(),
&mrias_cl, &mrias_cl,
NULL,
NULL); // The dirty klasses will be handled below NULL); // The dirty klasses will be handled below
assert(should_unload_classes() assert(should_unload_classes()

View File

@ -605,14 +605,10 @@ void ParNewGenTask::work(uint worker_id) {
false); false);
par_scan_state.start_strong_roots(); par_scan_state.start_strong_roots();
gch->gen_process_roots(_strong_roots_scope, gch->young_process_roots(_strong_roots_scope,
GenCollectedHeap::YoungGen, &par_scan_state.to_space_root_closure(),
true, // Process younger gens, if any, as strong roots. &par_scan_state.older_gen_closure(),
GenCollectedHeap::SO_ScavengeCodeCache, &cld_scan_closure);
GenCollectedHeap::StrongAndWeakRoots,
&par_scan_state.to_space_root_closure(),
&par_scan_state.older_gen_closure(),
&cld_scan_closure);
par_scan_state.end_strong_roots(); par_scan_state.end_strong_roots();

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved. * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* *
* This code is free software; you can redistribute it and/or modify it * This code is free software; you can redistribute it and/or modify it
@ -53,99 +53,4 @@ void G1BiasedMappedArrayBase::verify_biased_index_inclusive_end(idx_t biased_ind
biased_index, bias(), length()); biased_index, bias(), length());
} }
class TestMappedArray : public G1BiasedMappedArray<int> {
protected:
virtual int default_value() const { return 0xBAADBABE; }
public:
static void test_biasedarray() {
const size_t REGION_SIZE_IN_WORDS = 512;
const size_t NUM_REGIONS = 20;
HeapWord* fake_heap = (HeapWord*)LP64_ONLY(0xBAAA00000) NOT_LP64(0xBA000000); // Any value that is non-zero
TestMappedArray array;
array.initialize(fake_heap, fake_heap + REGION_SIZE_IN_WORDS * NUM_REGIONS,
REGION_SIZE_IN_WORDS * HeapWordSize);
// Check address calculation (bounds)
assert(array.bottom_address_mapped() == fake_heap,
"bottom mapped address should be " PTR_FORMAT ", but is " PTR_FORMAT, p2i(fake_heap), p2i(array.bottom_address_mapped()));
assert(array.end_address_mapped() == (fake_heap + REGION_SIZE_IN_WORDS * NUM_REGIONS), "must be");
int* bottom = array.address_mapped_to(fake_heap);
assert((void*)bottom == (void*) array.base(), "must be");
int* end = array.address_mapped_to(fake_heap + REGION_SIZE_IN_WORDS * NUM_REGIONS);
assert((void*)end == (void*)(array.base() + array.length()), "must be");
// The entire array should contain default value elements
for (int* current = bottom; current < end; current++) {
assert(*current == array.default_value(), "must be");
}
// Test setting values in the table
HeapWord* region_start_address = fake_heap + REGION_SIZE_IN_WORDS * (NUM_REGIONS / 2);
HeapWord* region_end_address = fake_heap + (REGION_SIZE_IN_WORDS * (NUM_REGIONS / 2) + REGION_SIZE_IN_WORDS - 1);
// Set/get by address tests: invert some value; first retrieve one
int actual_value = array.get_by_index(NUM_REGIONS / 2);
array.set_by_index(NUM_REGIONS / 2, ~actual_value);
// Get the same value by address, should correspond to the start of the "region"
int value = array.get_by_address(region_start_address);
assert(value == ~actual_value, "must be");
// Get the same value by address, at one HeapWord before the start
value = array.get_by_address(region_start_address - 1);
assert(value == array.default_value(), "must be");
// Get the same value by address, at the end of the "region"
value = array.get_by_address(region_end_address);
assert(value == ~actual_value, "must be");
// Make sure the next value maps to another index
value = array.get_by_address(region_end_address + 1);
assert(value == array.default_value(), "must be");
// Reset the value in the array
array.set_by_address(region_start_address + (region_end_address - region_start_address) / 2, actual_value);
// The entire array should have the default value again
for (int* current = bottom; current < end; current++) {
assert(*current == array.default_value(), "must be");
}
// Set/get by index tests: invert some value
idx_t index = NUM_REGIONS / 2;
actual_value = array.get_by_index(index);
array.set_by_index(index, ~actual_value);
value = array.get_by_index(index);
assert(value == ~actual_value, "must be");
value = array.get_by_index(index - 1);
assert(value == array.default_value(), "must be");
value = array.get_by_index(index + 1);
assert(value == array.default_value(), "must be");
array.set_by_index(0, 0);
value = array.get_by_index(0);
assert(value == 0, "must be");
array.set_by_index(array.length() - 1, 0);
value = array.get_by_index(array.length() - 1);
assert(value == 0, "must be");
array.set_by_index(index, 0);
// The array should have three zeros, and default values otherwise
size_t num_zeros = 0;
for (int* current = bottom; current < end; current++) {
assert(*current == array.default_value() || *current == 0, "must be");
if (*current == 0) {
num_zeros++;
}
}
assert(num_zeros == 3, "must be");
}
};
void TestG1BiasedArray_test() {
TestMappedArray::test_biasedarray();
}
#endif #endif

View File

@ -648,15 +648,10 @@ void DefNewGeneration::collect(bool full,
// See: CardTableModRefBSForCTRS::non_clean_card_iterate_possibly_parallel. // See: CardTableModRefBSForCTRS::non_clean_card_iterate_possibly_parallel.
StrongRootsScope srs(0); StrongRootsScope srs(0);
gch->gen_process_roots(&srs, gch->young_process_roots(&srs,
GenCollectedHeap::YoungGen, &fsc_with_no_gc_barrier,
true, // Process younger gens, if any, &fsc_with_gc_barrier,
// as strong roots. &cld_scan_closure);
GenCollectedHeap::SO_ScavengeCodeCache,
GenCollectedHeap::StrongAndWeakRoots,
&fsc_with_no_gc_barrier,
&fsc_with_gc_barrier,
&cld_scan_closure);
} }
// "evacuate followers". // "evacuate followers".

View File

@ -196,14 +196,13 @@ void GenMarkSweep::mark_sweep_phase1(bool clear_all_softrefs) {
{ {
StrongRootsScope srs(1); StrongRootsScope srs(1);
gch->gen_process_roots(&srs, gch->full_process_roots(&srs,
GenCollectedHeap::OldGen, false, // not the adjust phase
false, // Younger gens are not roots. GenCollectedHeap::SO_None,
GenCollectedHeap::SO_None, ClassUnloading, // only strong roots if ClassUnloading
ClassUnloading, // is enabled
&follow_root_closure, &follow_root_closure,
&follow_root_closure, &follow_cld_closure);
&follow_cld_closure);
} }
// Process reference objects found during marking // Process reference objects found during marking
@ -295,14 +294,12 @@ void GenMarkSweep::mark_sweep_phase3() {
{ {
StrongRootsScope srs(1); StrongRootsScope srs(1);
gch->gen_process_roots(&srs, gch->full_process_roots(&srs,
GenCollectedHeap::OldGen, true, // this is the adjust phase
false, // Younger gens are not roots. GenCollectedHeap::SO_AllCodeCache,
GenCollectedHeap::SO_AllCodeCache, false, // all roots
GenCollectedHeap::StrongAndWeakRoots, &adjust_pointer_closure,
&adjust_pointer_closure, &adjust_cld_closure);
&adjust_pointer_closure,
&adjust_cld_closure);
} }
gch->gen_process_weak_roots(&adjust_pointer_closure); gch->gen_process_weak_roots(&adjust_pointer_closure);

View File

@ -35,7 +35,6 @@
#include "prims/jni_md.h" #include "prims/jni_md.h"
#include "utilities/ticks.hpp" #include "utilities/ticks.hpp"
#define LOG_STOP_TIME_FORMAT "(%.3fs, %.3fs) %.3fms"
#define LOG_STOP_HEAP_FORMAT SIZE_FORMAT "M->" SIZE_FORMAT "M(" SIZE_FORMAT "M)" #define LOG_STOP_HEAP_FORMAT SIZE_FORMAT "M->" SIZE_FORMAT "M(" SIZE_FORMAT "M)"
inline void GCTraceTimeImpl::log_start(jlong start_counter) { inline void GCTraceTimeImpl::log_start(jlong start_counter) {
@ -46,7 +45,7 @@ inline void GCTraceTimeImpl::log_start(jlong start_counter) {
if (_gc_cause != GCCause::_no_gc) { if (_gc_cause != GCCause::_no_gc) {
out.print(" (%s)", GCCause::to_string(_gc_cause)); out.print(" (%s)", GCCause::to_string(_gc_cause));
} }
out.print_cr(" (%.3fs)", TimeHelper::counter_to_seconds(start_counter)); out.cr();
} }
} }
@ -71,7 +70,7 @@ inline void GCTraceTimeImpl::log_stop(jlong start_counter, jlong stop_counter) {
out.print(" " LOG_STOP_HEAP_FORMAT, used_before_m, used_m, capacity_m); out.print(" " LOG_STOP_HEAP_FORMAT, used_before_m, used_m, capacity_m);
} }
out.print_cr(" " LOG_STOP_TIME_FORMAT, start_time_in_secs, stop_time_in_secs, duration_in_ms); out.print_cr(" %.3fms", duration_in_ms);
} }
inline void GCTraceTimeImpl::time_stamp(Ticks& ticks) { inline void GCTraceTimeImpl::time_stamp(Ticks& ticks) {
@ -117,7 +116,7 @@ template <LogLevelType Level, LogTagType T0, LogTagType T1, LogTagType T2, LogTa
GCTraceConcTimeImpl<Level, T0, T1, T2, T3, T4, GuardTag>::GCTraceConcTimeImpl(const char* title) : GCTraceConcTimeImpl<Level, T0, T1, T2, T3, T4, GuardTag>::GCTraceConcTimeImpl(const char* title) :
_enabled(LogImpl<T0, T1, T2, T3, T4, GuardTag>::is_level(Level)), _start_time(os::elapsed_counter()), _title(title) { _enabled(LogImpl<T0, T1, T2, T3, T4, GuardTag>::is_level(Level)), _start_time(os::elapsed_counter()), _title(title) {
if (_enabled) { if (_enabled) {
LogImpl<T0, T1, T2, T3, T4>::template write<Level>("%s (%.3fs)", _title, TimeHelper::counter_to_seconds(_start_time)); LogImpl<T0, T1, T2, T3, T4>::template write<Level>("%s", _title);
} }
} }
@ -125,11 +124,8 @@ template <LogLevelType Level, LogTagType T0, LogTagType T1, LogTagType T2, LogTa
GCTraceConcTimeImpl<Level, T0, T1, T2, T3, T4, GuardTag>::~GCTraceConcTimeImpl() { GCTraceConcTimeImpl<Level, T0, T1, T2, T3, T4, GuardTag>::~GCTraceConcTimeImpl() {
if (_enabled) { if (_enabled) {
jlong stop_time = os::elapsed_counter(); jlong stop_time = os::elapsed_counter();
LogImpl<T0, T1, T2, T3, T4>::template write<Level>("%s " LOG_STOP_TIME_FORMAT, LogImpl<T0, T1, T2, T3, T4>::template write<Level>("%s %0.3fms", _title,
_title, TimeHelper::counter_to_millis(stop_time - _start_time));
TimeHelper::counter_to_seconds(_start_time),
TimeHelper::counter_to_seconds(stop_time),
TimeHelper::counter_to_millis(stop_time - _start_time));
} }
} }

View File

@ -613,16 +613,6 @@ void GenCollectedHeap::process_roots(StrongRootsScope* scope,
SystemDictionary::roots_oops_do(strong_roots, weak_roots); SystemDictionary::roots_oops_do(strong_roots, weak_roots);
} }
// All threads execute the following. A specific chunk of buckets
// from the StringTable are the individual tasks.
if (weak_roots != NULL) {
if (is_par) {
StringTable::possibly_parallel_oops_do(weak_roots);
} else {
StringTable::oops_do(weak_roots);
}
}
if (!_process_strong_tasks->is_task_claimed(GCH_PS_CodeCache_oops_do)) { if (!_process_strong_tasks->is_task_claimed(GCH_PS_CodeCache_oops_do)) {
if (so & SO_ScavengeCodeCache) { if (so & SO_ScavengeCodeCache) {
assert(code_roots != NULL, "must supply closure for code cache"); assert(code_roots != NULL, "must supply closure for code cache");
@ -644,46 +634,82 @@ void GenCollectedHeap::process_roots(StrongRootsScope* scope,
} }
} }
void GenCollectedHeap::gen_process_roots(StrongRootsScope* scope, void GenCollectedHeap::process_string_table_roots(StrongRootsScope* scope,
GenerationType type, OopClosure* root_closure) {
assert(root_closure != NULL, "Must be set");
// All threads execute the following. A specific chunk of buckets
// from the StringTable are the individual tasks.
if (scope->n_threads() > 1) {
StringTable::possibly_parallel_oops_do(root_closure);
} else {
StringTable::oops_do(root_closure);
}
}
void GenCollectedHeap::young_process_roots(StrongRootsScope* scope,
OopsInGenClosure* root_closure,
OopsInGenClosure* old_gen_closure,
CLDClosure* cld_closure) {
MarkingCodeBlobClosure mark_code_closure(root_closure, CodeBlobToOopClosure::FixRelocations);
process_roots(scope, SO_ScavengeCodeCache, root_closure, root_closure,
cld_closure, cld_closure, &mark_code_closure);
process_string_table_roots(scope, root_closure);
if (!_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
root_closure->reset_generation();
}
// When collection is parallel, all threads get to cooperate to do
// old generation scanning.
old_gen_closure->set_generation(_old_gen);
rem_set()->younger_refs_iterate(_old_gen, old_gen_closure, scope->n_threads());
old_gen_closure->reset_generation();
_process_strong_tasks->all_tasks_completed(scope->n_threads());
}
void GenCollectedHeap::cms_process_roots(StrongRootsScope* scope,
bool young_gen_as_roots, bool young_gen_as_roots,
ScanningOption so, ScanningOption so,
bool only_strong_roots, bool only_strong_roots,
OopsInGenClosure* not_older_gens, OopsInGenClosure* root_closure,
OopsInGenClosure* older_gens,
CLDClosure* cld_closure) { CLDClosure* cld_closure) {
const bool is_adjust_phase = !only_strong_roots && !young_gen_as_roots; MarkingCodeBlobClosure mark_code_closure(root_closure, !CodeBlobToOopClosure::FixRelocations);
OopsInGenClosure* weak_roots = only_strong_roots ? NULL : root_closure;
bool is_moving_collection = false;
if (type == YoungGen || is_adjust_phase) {
// young collections are always moving
is_moving_collection = true;
}
MarkingCodeBlobClosure mark_code_closure(not_older_gens, is_moving_collection);
OopsInGenClosure* weak_roots = only_strong_roots ? NULL : not_older_gens;
CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure; CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
process_roots(scope, so, process_roots(scope, so, root_closure, weak_roots, cld_closure, weak_cld_closure, &mark_code_closure);
not_older_gens, weak_roots, if (!only_strong_roots) {
cld_closure, weak_cld_closure, process_string_table_roots(scope, root_closure);
&mark_code_closure);
if (young_gen_as_roots) {
if (!_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
if (type == OldGen) {
not_older_gens->set_generation(_young_gen);
_young_gen->oop_iterate(not_older_gens);
}
not_older_gens->reset_generation();
}
} }
// When collection is parallel, all threads get to cooperate to do
// old generation scanning. if (young_gen_as_roots &&
if (type == YoungGen) { !_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
older_gens->set_generation(_old_gen); root_closure->set_generation(_young_gen);
rem_set()->younger_refs_iterate(_old_gen, older_gens, scope->n_threads()); _young_gen->oop_iterate(root_closure);
older_gens->reset_generation(); root_closure->reset_generation();
}
_process_strong_tasks->all_tasks_completed(scope->n_threads());
}
void GenCollectedHeap::full_process_roots(StrongRootsScope* scope,
bool is_adjust_phase,
ScanningOption so,
bool only_strong_roots,
OopsInGenClosure* root_closure,
CLDClosure* cld_closure) {
MarkingCodeBlobClosure mark_code_closure(root_closure, is_adjust_phase);
OopsInGenClosure* weak_roots = only_strong_roots ? NULL : root_closure;
CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
process_roots(scope, so, root_closure, weak_roots, cld_closure, weak_cld_closure, &mark_code_closure);
if (is_adjust_phase) {
// We never treat the string table as roots during marking
// for the full gc, so we only need to process it during
// the adjust phase.
process_string_table_roots(scope, root_closure);
} }
_process_strong_tasks->all_tasks_completed(scope->n_threads()); _process_strong_tasks->all_tasks_completed(scope->n_threads());

View File

@ -374,16 +374,7 @@ public:
// asserted to be this type. // asserted to be this type.
static GenCollectedHeap* heap(); static GenCollectedHeap* heap();
// Invoke the "do_oop" method of one of the closures "not_older_gens" // The ScanningOption determines which of the roots
// or "older_gens" on root locations for the generations depending on
// the type. (The "older_gens" closure is used for scanning references
// from older generations; "not_older_gens" is used everywhere else.)
// If "younger_gens_as_roots" is false, younger generations are
// not scanned as roots; in this case, the caller must be arranging to
// scan the younger generations itself. (For example, a generation might
// explicitly mark reachable objects in younger generations, to avoid
// excess storage retention.)
// The "so" argument determines which of the roots
// the closure is applied to: // the closure is applied to:
// "SO_None" does none; // "SO_None" does none;
enum ScanningOption { enum ScanningOption {
@ -401,19 +392,34 @@ public:
CLDClosure* weak_cld_closure, CLDClosure* weak_cld_closure,
CodeBlobToOopClosure* code_roots); CodeBlobToOopClosure* code_roots);
public: void process_string_table_roots(StrongRootsScope* scope,
static const bool StrongAndWeakRoots = false; OopClosure* root_closure);
static const bool StrongRootsOnly = true;
void gen_process_roots(StrongRootsScope* scope, public:
GenerationType type, void young_process_roots(StrongRootsScope* scope,
OopsInGenClosure* root_closure,
OopsInGenClosure* old_gen_closure,
CLDClosure* cld_closure);
// If "young_gen_as_roots" is false, younger generations are
// not scanned as roots; in this case, the caller must be arranging to
// scan the younger generations itself. (For example, a generation might
// explicitly mark reachable objects in younger generations, to avoid
// excess storage retention.)
void cms_process_roots(StrongRootsScope* scope,
bool young_gen_as_roots, bool young_gen_as_roots,
ScanningOption so, ScanningOption so,
bool only_strong_roots, bool only_strong_roots,
OopsInGenClosure* not_older_gens, OopsInGenClosure* root_closure,
OopsInGenClosure* older_gens,
CLDClosure* cld_closure); CLDClosure* cld_closure);
void full_process_roots(StrongRootsScope* scope,
bool is_adjust_phase,
ScanningOption so,
bool only_strong_roots,
OopsInGenClosure* root_closure,
CLDClosure* cld_closure);
// Apply "root_closure" to all the weak roots of the system. // Apply "root_closure" to all the weak roots of the system.
// These include JNI weak roots, string table, // These include JNI weak roots, string table,
// and referents of reachable weak refs. // and referents of reachable weak refs.

View File

@ -40,6 +40,7 @@
class InvocationCounter VALUE_OBJ_CLASS_SPEC { class InvocationCounter VALUE_OBJ_CLASS_SPEC {
friend class VMStructs; friend class VMStructs;
friend class JVMCIVMStructs;
friend class ciReplay; friend class ciReplay;
private: // bit no: |31 3| 2 | 1 0 | private: // bit no: |31 3| 2 | 1 0 |
unsigned int _counter; // format: [count|carry|state] unsigned int _counter; // format: [count|carry|state]
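Given the bit layout documented in the comment above (state in bits 1..0, carry in bit 2, count in bits 31..3), a raw counter word can be decoded as follows; this is an illustrative sketch and the constant names are not HotSpot's:

    // Illustrative decoding of the [count|carry|state] layout described above.
    static const int      kStateBits  = 2;
    static const int      kCountShift = kStateBits + 1;          // count starts at bit 3
    static const unsigned kStateMask  = (1u << kStateBits) - 1;  // bits 1..0
    static const unsigned kCarryMask  = 1u << kStateBits;        // bit 2

    static unsigned state_of(unsigned counter) { return counter & kStateMask; }
    static bool     carry_of(unsigned counter) { return (counter & kCarryMask) != 0; }
    static unsigned count_of(unsigned counter) { return counter >> kCountShift; }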

View File

@ -858,8 +858,10 @@ methodHandle LinkResolver::resolve_interface_method(const LinkInfo& link_info, B
} }
if (log_develop_is_enabled(Trace, itables)) { if (log_develop_is_enabled(Trace, itables)) {
trace_method_resolution("invokeinterface resolved method: caller-class", char buf[200];
link_info.current_klass(), resolved_klass, jio_snprintf(buf, sizeof(buf), "%s resolved interface method: caller-class:",
Bytecodes::name(code));
trace_method_resolution(buf, link_info.current_klass(), resolved_klass,
resolved_method, true); resolved_method, true);
} }
@ -1424,7 +1426,7 @@ void LinkResolver::runtime_resolve_interface_method(CallInfo& result,
} }
if (log_develop_is_enabled(Trace, itables)) { if (log_develop_is_enabled(Trace, itables)) {
trace_method_resolution("invokeinterface selected method: receiver-class", trace_method_resolution("invokeinterface selected method: receiver-class:",
recv_klass, resolved_klass, sel_method, true); recv_klass, resolved_klass, sel_method, true);
} }
// setup result // setup result

View File

@ -172,7 +172,7 @@ OopMap* CodeInstaller::create_oop_map(Handle debug_info, TRAPS) {
return map; return map;
} }
void* CodeInstaller::record_metadata_reference(Handle constant, TRAPS) { void* CodeInstaller::record_metadata_reference(CodeSection* section, address dest, Handle constant, TRAPS) {
/* /*
* This method needs to return a raw (untyped) pointer, since the value of a pointer to the base * This method needs to return a raw (untyped) pointer, since the value of a pointer to the base
* class is in general not equal to the pointer of the subclass. When patching metaspace pointers, * class is in general not equal to the pointer of the subclass. When patching metaspace pointers,
@ -184,12 +184,14 @@ void* CodeInstaller::record_metadata_reference(Handle constant, TRAPS) {
Klass* klass = java_lang_Class::as_Klass(HotSpotResolvedObjectTypeImpl::javaClass(obj)); Klass* klass = java_lang_Class::as_Klass(HotSpotResolvedObjectTypeImpl::javaClass(obj));
assert(!HotSpotMetaspaceConstantImpl::compressed(constant), "unexpected compressed klass pointer %s @ " INTPTR_FORMAT, klass->name()->as_C_string(), p2i(klass)); assert(!HotSpotMetaspaceConstantImpl::compressed(constant), "unexpected compressed klass pointer %s @ " INTPTR_FORMAT, klass->name()->as_C_string(), p2i(klass));
int index = _oop_recorder->find_index(klass); int index = _oop_recorder->find_index(klass);
section->relocate(dest, metadata_Relocation::spec(index));
TRACE_jvmci_3("metadata[%d of %d] = %s", index, _oop_recorder->metadata_count(), klass->name()->as_C_string()); TRACE_jvmci_3("metadata[%d of %d] = %s", index, _oop_recorder->metadata_count(), klass->name()->as_C_string());
return klass; return klass;
} else if (obj->is_a(HotSpotResolvedJavaMethodImpl::klass())) { } else if (obj->is_a(HotSpotResolvedJavaMethodImpl::klass())) {
Method* method = (Method*) (address) HotSpotResolvedJavaMethodImpl::metaspaceMethod(obj); Method* method = (Method*) (address) HotSpotResolvedJavaMethodImpl::metaspaceMethod(obj);
assert(!HotSpotMetaspaceConstantImpl::compressed(constant), "unexpected compressed method pointer %s @ " INTPTR_FORMAT, method->name()->as_C_string(), p2i(method)); assert(!HotSpotMetaspaceConstantImpl::compressed(constant), "unexpected compressed method pointer %s @ " INTPTR_FORMAT, method->name()->as_C_string(), p2i(method));
int index = _oop_recorder->find_index(method); int index = _oop_recorder->find_index(method);
section->relocate(dest, metadata_Relocation::spec(index));
TRACE_jvmci_3("metadata[%d of %d] = %s", index, _oop_recorder->metadata_count(), method->name()->as_C_string()); TRACE_jvmci_3("metadata[%d of %d] = %s", index, _oop_recorder->metadata_count(), method->name()->as_C_string());
return method; return method;
} else { } else {
@ -198,7 +200,7 @@ void* CodeInstaller::record_metadata_reference(Handle constant, TRAPS) {
} }
#ifdef _LP64 #ifdef _LP64
narrowKlass CodeInstaller::record_narrow_metadata_reference(Handle constant, TRAPS) { narrowKlass CodeInstaller::record_narrow_metadata_reference(CodeSection* section, address dest, Handle constant, TRAPS) {
oop obj = HotSpotMetaspaceConstantImpl::metaspaceObject(constant); oop obj = HotSpotMetaspaceConstantImpl::metaspaceObject(constant);
assert(HotSpotMetaspaceConstantImpl::compressed(constant), "unexpected uncompressed pointer"); assert(HotSpotMetaspaceConstantImpl::compressed(constant), "unexpected uncompressed pointer");
@ -208,6 +210,7 @@ narrowKlass CodeInstaller::record_narrow_metadata_reference(Handle constant, TRA
Klass* klass = java_lang_Class::as_Klass(HotSpotResolvedObjectTypeImpl::javaClass(obj)); Klass* klass = java_lang_Class::as_Klass(HotSpotResolvedObjectTypeImpl::javaClass(obj));
int index = _oop_recorder->find_index(klass); int index = _oop_recorder->find_index(klass);
section->relocate(dest, metadata_Relocation::spec(index));
TRACE_jvmci_3("narrowKlass[%d of %d] = %s", index, _oop_recorder->metadata_count(), klass->name()->as_C_string()); TRACE_jvmci_3("narrowKlass[%d of %d] = %s", index, _oop_recorder->metadata_count(), klass->name()->as_C_string());
return Klass::encode_klass(klass); return Klass::encode_klass(klass);
} }
@ -701,12 +704,12 @@ JVMCIEnv::CodeInstallResult CodeInstaller::initialize_buffer(CodeBuffer& buffer,
if (constant->is_a(HotSpotMetaspaceConstantImpl::klass())) { if (constant->is_a(HotSpotMetaspaceConstantImpl::klass())) {
if (HotSpotMetaspaceConstantImpl::compressed(constant)) { if (HotSpotMetaspaceConstantImpl::compressed(constant)) {
#ifdef _LP64 #ifdef _LP64
*((narrowKlass*) dest) = record_narrow_metadata_reference(constant, CHECK_OK); *((narrowKlass*) dest) = record_narrow_metadata_reference(_constants, dest, constant, CHECK_OK);
#else #else
JVMCI_ERROR_OK("unexpected compressed Klass* in 32-bit mode"); JVMCI_ERROR_OK("unexpected compressed Klass* in 32-bit mode");
#endif #endif
} else { } else {
*((void**) dest) = record_metadata_reference(constant, CHECK_OK); *((void**) dest) = record_metadata_reference(_constants, dest, constant, CHECK_OK);
} }
} else if (constant->is_a(HotSpotObjectConstantImpl::klass())) { } else if (constant->is_a(HotSpotObjectConstantImpl::klass())) {
Handle obj = HotSpotObjectConstantImpl::object(constant); Handle obj = HotSpotObjectConstantImpl::object(constant);

View File

@ -189,9 +189,9 @@ protected:
ScopeValue* get_scope_value(Handle value, BasicType type, GrowableArray<ScopeValue*>* objects, ScopeValue* &second, TRAPS); ScopeValue* get_scope_value(Handle value, BasicType type, GrowableArray<ScopeValue*>* objects, ScopeValue* &second, TRAPS);
MonitorValue* get_monitor_value(Handle value, GrowableArray<ScopeValue*>* objects, TRAPS); MonitorValue* get_monitor_value(Handle value, GrowableArray<ScopeValue*>* objects, TRAPS);
void* record_metadata_reference(Handle constant, TRAPS); void* record_metadata_reference(CodeSection* section, address dest, Handle constant, TRAPS);
#ifdef _LP64 #ifdef _LP64
narrowKlass record_narrow_metadata_reference(Handle constant, TRAPS); narrowKlass record_narrow_metadata_reference(CodeSection* section, address dest, Handle constant, TRAPS);
#endif #endif
// extract the fields of the HotSpotCompiledCode // extract the fields of the HotSpotCompiledCode

View File

@ -640,8 +640,6 @@ JVM_ENTRY(jobject, JVM_GetJVMCIRuntime(JNIEnv *env, jclass c))
JVM_END JVM_END
Handle JVMCIRuntime::callStatic(const char* className, const char* methodName, const char* signature, JavaCallArguments* args, TRAPS) { Handle JVMCIRuntime::callStatic(const char* className, const char* methodName, const char* signature, JavaCallArguments* args, TRAPS) {
guarantee(!_HotSpotJVMCIRuntime_initialized, "cannot reinitialize HotSpotJVMCIRuntime");
TempNewSymbol name = SymbolTable::new_symbol(className, CHECK_(Handle())); TempNewSymbol name = SymbolTable::new_symbol(className, CHECK_(Handle()));
KlassHandle klass = SystemDictionary::resolve_or_fail(name, true, CHECK_(Handle())); KlassHandle klass = SystemDictionary::resolve_or_fail(name, true, CHECK_(Handle()));
TempNewSymbol runtime = SymbolTable::new_symbol(methodName, CHECK_(Handle())); TempNewSymbol runtime = SymbolTable::new_symbol(methodName, CHECK_(Handle()));
@ -656,42 +654,37 @@ Handle JVMCIRuntime::callStatic(const char* className, const char* methodName, c
} }
void JVMCIRuntime::initialize_HotSpotJVMCIRuntime(TRAPS) { void JVMCIRuntime::initialize_HotSpotJVMCIRuntime(TRAPS) {
if (JNIHandles::resolve(_HotSpotJVMCIRuntime_instance) == NULL) { guarantee(!_HotSpotJVMCIRuntime_initialized, "cannot reinitialize HotSpotJVMCIRuntime");
ResourceMark rm; JVMCIRuntime::initialize_well_known_classes(CHECK);
#ifdef ASSERT // This should only be called in the context of the JVMCI class being initialized
// This should only be called in the context of the JVMCI class being initialized instanceKlassHandle klass = InstanceKlass::cast(SystemDictionary::JVMCI_klass());
TempNewSymbol name = SymbolTable::new_symbol("jdk/vm/ci/runtime/JVMCI", CHECK); guarantee(klass->is_being_initialized() && klass->is_reentrant_initialization(THREAD),
Klass* k = SystemDictionary::resolve_or_null(name, CHECK); "HotSpotJVMCIRuntime initialization should only be triggered through JVMCI initialization");
instanceKlassHandle klass = InstanceKlass::cast(k);
assert(klass->is_being_initialized() && klass->is_reentrant_initialization(THREAD),
"HotSpotJVMCIRuntime initialization should only be triggered through JVMCI initialization");
#endif
Handle result = callStatic("jdk/vm/ci/hotspot/HotSpotJVMCIRuntime", Handle result = callStatic("jdk/vm/ci/hotspot/HotSpotJVMCIRuntime",
"runtime", "runtime",
"()Ljdk/vm/ci/hotspot/HotSpotJVMCIRuntime;", NULL, CHECK); "()Ljdk/vm/ci/hotspot/HotSpotJVMCIRuntime;", NULL, CHECK);
objArrayOop trivial_prefixes = HotSpotJVMCIRuntime::trivialPrefixes(result); objArrayOop trivial_prefixes = HotSpotJVMCIRuntime::trivialPrefixes(result);
if (trivial_prefixes != NULL) { if (trivial_prefixes != NULL) {
char** prefixes = NEW_C_HEAP_ARRAY(char*, trivial_prefixes->length(), mtCompiler); char** prefixes = NEW_C_HEAP_ARRAY(char*, trivial_prefixes->length(), mtCompiler);
for (int i = 0; i < trivial_prefixes->length(); i++) { for (int i = 0; i < trivial_prefixes->length(); i++) {
oop str = trivial_prefixes->obj_at(i); oop str = trivial_prefixes->obj_at(i);
if (str == NULL) { if (str == NULL) {
THROW(vmSymbols::java_lang_NullPointerException()); THROW(vmSymbols::java_lang_NullPointerException());
} else { } else {
prefixes[i] = strdup(java_lang_String::as_utf8_string(str)); prefixes[i] = strdup(java_lang_String::as_utf8_string(str));
}
} }
_trivial_prefixes = prefixes;
_trivial_prefixes_count = trivial_prefixes->length();
} }
int adjustment = HotSpotJVMCIRuntime::compilationLevelAdjustment(result); _trivial_prefixes = prefixes;
assert(adjustment >= JVMCIRuntime::none && _trivial_prefixes_count = trivial_prefixes->length();
adjustment <= JVMCIRuntime::by_full_signature,
"compilation level adjustment out of bounds");
_comp_level_adjustment = (CompLevelAdjustment) adjustment;
_HotSpotJVMCIRuntime_initialized = true;
_HotSpotJVMCIRuntime_instance = JNIHandles::make_global(result());
} }
int adjustment = HotSpotJVMCIRuntime::compilationLevelAdjustment(result);
assert(adjustment >= JVMCIRuntime::none &&
adjustment <= JVMCIRuntime::by_full_signature,
"compilation level adjustment out of bounds");
_comp_level_adjustment = (CompLevelAdjustment) adjustment;
_HotSpotJVMCIRuntime_initialized = true;
_HotSpotJVMCIRuntime_instance = JNIHandles::make_global(result());
} }
void JVMCIRuntime::initialize_JVMCI(TRAPS) { void JVMCIRuntime::initialize_JVMCI(TRAPS) {


@ -85,6 +85,7 @@ bool JVMCIGlobals::check_jvmci_flags_are_consistent() {
CHECK_NOT_SET(JVMCIUseFastLocking, EnableJVMCI) CHECK_NOT_SET(JVMCIUseFastLocking, EnableJVMCI)
CHECK_NOT_SET(JVMCINMethodSizeLimit, EnableJVMCI) CHECK_NOT_SET(JVMCINMethodSizeLimit, EnableJVMCI)
CHECK_NOT_SET(MethodProfileWidth, EnableJVMCI) CHECK_NOT_SET(MethodProfileWidth, EnableJVMCI)
CHECK_NOT_SET(JVMCIPrintProperties, EnableJVMCI)
CHECK_NOT_SET(TraceUncollectedSpeculations, EnableJVMCI) CHECK_NOT_SET(TraceUncollectedSpeculations, EnableJVMCI)
#ifndef PRODUCT #ifndef PRODUCT


@ -49,6 +49,9 @@
experimental(bool, UseJVMCICompiler, false, \ experimental(bool, UseJVMCICompiler, false, \
"Use JVMCI as the default compiler") \ "Use JVMCI as the default compiler") \
\ \
experimental(bool, JVMCIPrintProperties, false, \
"Prints properties used by the JVMCI compiler") \
\
experimental(bool, BootstrapJVMCI, false, \ experimental(bool, BootstrapJVMCI, false, \
"Bootstrap JVMCI before running Java main method") \ "Bootstrap JVMCI before running Java main method") \
\ \


@ -29,6 +29,7 @@
#else #else
#define JVMCI_WK_KLASSES_DO(do_klass) \ #define JVMCI_WK_KLASSES_DO(do_klass) \
/* JVMCI classes. These are loaded on-demand. */ \ /* JVMCI classes. These are loaded on-demand. */ \
do_klass(JVMCI_klass, jdk_vm_ci_runtime_JVMCI, Jvmci) \
do_klass(HotSpotCompiledCode_klass, jdk_vm_ci_hotspot_HotSpotCompiledCode, Jvmci) \ do_klass(HotSpotCompiledCode_klass, jdk_vm_ci_hotspot_HotSpotCompiledCode, Jvmci) \
do_klass(HotSpotCompiledCode_Comment_klass, jdk_vm_ci_hotspot_HotSpotCompiledCode_Comment, Jvmci) \ do_klass(HotSpotCompiledCode_Comment_klass, jdk_vm_ci_hotspot_HotSpotCompiledCode_Comment, Jvmci) \
do_klass(HotSpotCompiledNmethod_klass, jdk_vm_ci_hotspot_HotSpotCompiledNmethod, Jvmci) \ do_klass(HotSpotCompiledNmethod_klass, jdk_vm_ci_hotspot_HotSpotCompiledNmethod, Jvmci) \


@ -169,6 +169,8 @@
nonstatic_field(JVMCIEnv, _task, CompileTask*) \ nonstatic_field(JVMCIEnv, _task, CompileTask*) \
nonstatic_field(JVMCIEnv, _jvmti_can_hotswap_or_post_breakpoint, bool) \ nonstatic_field(JVMCIEnv, _jvmti_can_hotswap_or_post_breakpoint, bool) \
\ \
nonstatic_field(InvocationCounter, _counter, unsigned int) \
\
nonstatic_field(Klass, _secondary_super_cache, Klass*) \ nonstatic_field(Klass, _secondary_super_cache, Klass*) \
nonstatic_field(Klass, _secondary_supers, Array<Klass*>*) \ nonstatic_field(Klass, _secondary_supers, Array<Klass*>*) \
nonstatic_field(Klass, _super, Klass*) \ nonstatic_field(Klass, _super, Klass*) \
@ -199,13 +201,34 @@
volatile_nonstatic_field(Method, _code, CompiledMethod*) \ volatile_nonstatic_field(Method, _code, CompiledMethod*) \
volatile_nonstatic_field(Method, _from_compiled_entry, address) \ volatile_nonstatic_field(Method, _from_compiled_entry, address) \
\ \
nonstatic_field(MethodCounters, _nmethod_age, int) \
nonstatic_field(MethodCounters, _interpreter_invocation_limit, int) \
nonstatic_field(MethodCounters, _interpreter_backward_branch_limit, int) \
nonstatic_field(MethodCounters, _interpreter_profile_limit, int) \
nonstatic_field(MethodCounters, _invoke_mask, int) \
nonstatic_field(MethodCounters, _backedge_mask, int) \
nonstatic_field(MethodCounters, _interpreter_invocation_count, int) \
nonstatic_field(MethodCounters, _interpreter_throwout_count, u2) \
JVMTI_ONLY(nonstatic_field(MethodCounters, _number_of_breakpoints, u2)) \
nonstatic_field(MethodCounters, _invocation_counter, InvocationCounter) \ nonstatic_field(MethodCounters, _invocation_counter, InvocationCounter) \
nonstatic_field(MethodCounters, _backedge_counter, InvocationCounter) \ nonstatic_field(MethodCounters, _backedge_counter, InvocationCounter) \
\ \
nonstatic_field(MethodData, _size, int) \ nonstatic_field(MethodData, _size, int) \
nonstatic_field(MethodData, _method, Method*) \
nonstatic_field(MethodData, _data_size, int) \ nonstatic_field(MethodData, _data_size, int) \
nonstatic_field(MethodData, _data[0], intptr_t) \ nonstatic_field(MethodData, _data[0], intptr_t) \
nonstatic_field(MethodData, _parameters_type_data_di, int) \
nonstatic_field(MethodData, _nof_decompiles, uint) \
nonstatic_field(MethodData, _nof_overflow_recompiles, uint) \
nonstatic_field(MethodData, _nof_overflow_traps, uint) \
nonstatic_field(MethodData, _trap_hist._array[0], u1) \ nonstatic_field(MethodData, _trap_hist._array[0], u1) \
nonstatic_field(MethodData, _eflags, intx) \
nonstatic_field(MethodData, _arg_local, intx) \
nonstatic_field(MethodData, _arg_stack, intx) \
nonstatic_field(MethodData, _arg_returned, intx) \
nonstatic_field(MethodData, _tenure_traps, uint) \
nonstatic_field(MethodData, _invoke_mask, int) \
nonstatic_field(MethodData, _backedge_mask, int) \
nonstatic_field(MethodData, _jvmci_ir_size, int) \ nonstatic_field(MethodData, _jvmci_ir_size, int) \
\ \
nonstatic_field(nmethod, _verified_entry_point, address) \ nonstatic_field(nmethod, _verified_entry_point, address) \
@ -290,6 +313,7 @@
declare_toplevel_type(ExceptionTableElement) \ declare_toplevel_type(ExceptionTableElement) \
declare_toplevel_type(Flag) \ declare_toplevel_type(Flag) \
declare_toplevel_type(Flag*) \ declare_toplevel_type(Flag*) \
declare_toplevel_type(InvocationCounter) \
declare_toplevel_type(JVMCIEnv) \ declare_toplevel_type(JVMCIEnv) \
declare_toplevel_type(LocalVariableTableElement) \ declare_toplevel_type(LocalVariableTableElement) \
declare_toplevel_type(narrowKlass) \ declare_toplevel_type(narrowKlass) \
@ -688,7 +712,6 @@
declare_constant(VM_Version::sun4v_m) \ declare_constant(VM_Version::sun4v_m) \
declare_constant(VM_Version::blk_init_instructions_m) \ declare_constant(VM_Version::blk_init_instructions_m) \
declare_constant(VM_Version::fmaf_instructions_m) \ declare_constant(VM_Version::fmaf_instructions_m) \
declare_constant(VM_Version::fmau_instructions_m) \
declare_constant(VM_Version::sparc64_family_m) \ declare_constant(VM_Version::sparc64_family_m) \
declare_constant(VM_Version::M_family_m) \ declare_constant(VM_Version::M_family_m) \
declare_constant(VM_Version::T_family_m) \ declare_constant(VM_Version::T_family_m) \


@ -29,6 +29,7 @@
#define JVMCI_VM_SYMBOLS_DO(template, do_alias) #define JVMCI_VM_SYMBOLS_DO(template, do_alias)
#else #else
#define JVMCI_VM_SYMBOLS_DO(template, do_alias) \ #define JVMCI_VM_SYMBOLS_DO(template, do_alias) \
template(jdk_vm_ci_runtime_JVMCI, "jdk/vm/ci/runtime/JVMCI") \
template(jdk_vm_ci_hotspot_HotSpotCompiledCode, "jdk/vm/ci/hotspot/HotSpotCompiledCode") \ template(jdk_vm_ci_hotspot_HotSpotCompiledCode, "jdk/vm/ci/hotspot/HotSpotCompiledCode") \
template(jdk_vm_ci_hotspot_HotSpotCompiledCode_Comment, "jdk/vm/ci/hotspot/HotSpotCompiledCode$Comment") \ template(jdk_vm_ci_hotspot_HotSpotCompiledCode_Comment, "jdk/vm/ci/hotspot/HotSpotCompiledCode$Comment") \
template(jdk_vm_ci_hotspot_HotSpotCompiledNmethod, "jdk/vm/ci/hotspot/HotSpotCompiledNmethod") \ template(jdk_vm_ci_hotspot_HotSpotCompiledNmethod, "jdk/vm/ci/hotspot/HotSpotCompiledNmethod") \

File diff suppressed because it is too large


@ -97,11 +97,7 @@ static bool is_regular_file(const char* filename) {
if (ret != 0) { if (ret != 0) {
return false; return false;
} }
#ifdef _WINDOWS return (st.st_mode & S_IFMT) == S_IFREG;
return (st.st_mode & S_IFMT) == _S_IFREG;
#else
return S_ISREG(st.st_mode);
#endif
} }
// Try to find the next number that should be used for file rotation. // Try to find the next number that should be used for file rotation.


@ -40,6 +40,7 @@
LOG_TAG(attach) \ LOG_TAG(attach) \
LOG_TAG(barrier) \ LOG_TAG(barrier) \
LOG_TAG(biasedlocking) \ LOG_TAG(biasedlocking) \
LOG_TAG(blocks) \
LOG_TAG(bot) \ LOG_TAG(bot) \
LOG_TAG(breakpoint) \ LOG_TAG(breakpoint) \
LOG_TAG(census) \ LOG_TAG(census) \
@ -105,6 +106,7 @@
LOG_TAG(scavenge) \ LOG_TAG(scavenge) \
LOG_TAG(scrub) \ LOG_TAG(scrub) \
LOG_TAG(stacktrace) \ LOG_TAG(stacktrace) \
LOG_TAG(stackwalk) \
LOG_TAG(start) \ LOG_TAG(start) \
LOG_TAG(startuptime) \ LOG_TAG(startuptime) \
LOG_TAG(state) \ LOG_TAG(state) \


@ -263,7 +263,7 @@ void FileMapInfo::allocate_classpath_entry_table() {
} else { } else {
struct stat st; struct stat st;
if (os::stat(name, &st) == 0) { if (os::stat(name, &st) == 0) {
if ((st.st_mode & S_IFDIR) == S_IFDIR) { if ((st.st_mode & S_IFMT) == S_IFDIR) {
if (!os::dir_is_empty(name)) { if (!os::dir_is_empty(name)) {
ClassLoader::exit_with_path_failure( ClassLoader::exit_with_path_failure(
"Cannot have non-empty directory in archived classpaths", name); "Cannot have non-empty directory in archived classpaths", name);


@ -283,11 +283,15 @@ public:
bool validate_classpath_entry_table(); bool validate_classpath_entry_table();
static SharedClassPathEntry* shared_classpath(int index) { static SharedClassPathEntry* shared_classpath(int index) {
if (index < 0) {
return NULL;
}
char* p = (char*)_classpath_entry_table; char* p = (char*)_classpath_entry_table;
p += _classpath_entry_size * index; p += _classpath_entry_size * index;
return (SharedClassPathEntry*)p; return (SharedClassPathEntry*)p;
} }
static const char* shared_classpath_name(int index) { static const char* shared_classpath_name(int index) {
assert(index >= 0, "Sanity");
return shared_classpath(index)->_name; return shared_classpath(index)->_name;
} }


@ -249,10 +249,65 @@ class ChunkManager : public CHeapObj<mtInternal> {
void print_on(outputStream* st) const; void print_on(outputStream* st) const;
}; };
class SmallBlocks : public CHeapObj<mtClass> {
const static uint _small_block_max_size = sizeof(TreeChunk<Metablock, FreeList<Metablock> >)/HeapWordSize;
const static uint _small_block_min_size = sizeof(Metablock)/HeapWordSize;
private:
FreeList<Metablock> _small_lists[_small_block_max_size - _small_block_min_size];
FreeList<Metablock>& list_at(size_t word_size) {
assert(word_size >= _small_block_min_size, "There are no metaspace objects less than %u words", _small_block_min_size);
return _small_lists[word_size - _small_block_min_size];
}
public:
SmallBlocks() {
for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
uint k = i - _small_block_min_size;
_small_lists[k].set_size(i);
}
}
size_t total_size() const {
size_t result = 0;
for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
uint k = i - _small_block_min_size;
result = result + _small_lists[k].count() * _small_lists[k].size();
}
return result;
}
static uint small_block_max_size() { return _small_block_max_size; }
static uint small_block_min_size() { return _small_block_min_size; }
MetaWord* get_block(size_t word_size) {
if (list_at(word_size).count() > 0) {
MetaWord* new_block = (MetaWord*) list_at(word_size).get_chunk_at_head();
return new_block;
} else {
return NULL;
}
}
void return_block(Metablock* free_chunk, size_t word_size) {
list_at(word_size).return_chunk_at_head(free_chunk, false);
assert(list_at(word_size).count() > 0, "Should have a chunk");
}
void print_on(outputStream* st) const {
st->print_cr("SmallBlocks:");
for (uint i = _small_block_min_size; i < _small_block_max_size; i++) {
uint k = i - _small_block_min_size;
st->print_cr("small_lists size " SIZE_FORMAT " count " SIZE_FORMAT, _small_lists[k].size(), _small_lists[k].count());
}
}
};
// Used to manage the free list of Metablocks (a block corresponds // Used to manage the free list of Metablocks (a block corresponds
// to the allocation of a quantum of metadata). // to the allocation of a quantum of metadata).
class BlockFreelist VALUE_OBJ_CLASS_SPEC { class BlockFreelist : public CHeapObj<mtClass> {
BlockTreeDictionary* const _dictionary; BlockTreeDictionary* const _dictionary;
SmallBlocks* _small_blocks;
// Only allocate and split from freelist if the size of the allocation // Only allocate and split from freelist if the size of the allocation
// is at least 1/4th the size of the available block. // is at least 1/4th the size of the available block.
@ -260,6 +315,12 @@ class BlockFreelist VALUE_OBJ_CLASS_SPEC {
// Accessors // Accessors
BlockTreeDictionary* dictionary() const { return _dictionary; } BlockTreeDictionary* dictionary() const { return _dictionary; }
SmallBlocks* small_blocks() {
if (_small_blocks == NULL) {
_small_blocks = new SmallBlocks();
}
return _small_blocks;
}
public: public:
BlockFreelist(); BlockFreelist();
@ -269,8 +330,15 @@ class BlockFreelist VALUE_OBJ_CLASS_SPEC {
MetaWord* get_block(size_t word_size); MetaWord* get_block(size_t word_size);
void return_block(MetaWord* p, size_t word_size); void return_block(MetaWord* p, size_t word_size);
size_t total_size() { return dictionary()->total_size(); } size_t total_size() const {
size_t result = dictionary()->total_size();
if (_small_blocks != NULL) {
result = result + _small_blocks->total_size();
}
return result;
}
static size_t min_dictionary_size() { return TreeChunk<Metablock, FreeList<Metablock> >::min_size(); }
void print_on(outputStream* st) const; void print_on(outputStream* st) const;
}; };
@ -629,7 +697,7 @@ class SpaceManager : public CHeapObj<mtClass> {
// are assumed to be in chunks in use by the SpaceManager // are assumed to be in chunks in use by the SpaceManager
// and all chunks in use by a SpaceManager are freed when // and all chunks in use by a SpaceManager are freed when
// the class loader using the SpaceManager is collected. // the class loader using the SpaceManager is collected.
BlockFreelist _block_freelists; BlockFreelist* _block_freelists;
// protects virtualspace and chunk expansions // protects virtualspace and chunk expansions
static const char* _expand_lock_name; static const char* _expand_lock_name;
@ -643,9 +711,7 @@ class SpaceManager : public CHeapObj<mtClass> {
_chunks_in_use[index] = v; _chunks_in_use[index] = v;
} }
BlockFreelist* block_freelists() const { BlockFreelist* block_freelists() const { return _block_freelists; }
return (BlockFreelist*) &_block_freelists;
}
Metaspace::MetadataType mdtype() { return _mdtype; } Metaspace::MetadataType mdtype() { return _mdtype; }
@ -763,7 +829,9 @@ class SpaceManager : public CHeapObj<mtClass> {
void verify_allocated_blocks_words(); void verify_allocated_blocks_words();
#endif #endif
size_t get_raw_word_size(size_t word_size) { // This adjusts the size given to be greater than the minimum allocation size in
// words for data in metaspace. Essentially the minimum size is currently 3 words.
size_t get_allocation_word_size(size_t word_size) {
size_t byte_size = word_size * BytesPerWord; size_t byte_size = word_size * BytesPerWord;
size_t raw_bytes_size = MAX2(byte_size, sizeof(Metablock)); size_t raw_bytes_size = MAX2(byte_size, sizeof(Metablock));
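The renamed helper above (`get_raw_word_size` → `get_allocation_word_size`) clamps every metaspace request to the smallest block the free lists can track, which the new comment puts at 3 words. A rough standalone sketch of that clamping, assuming an LP64 build where `BytesPerWord` is 8 and `sizeof(Metablock)` is 3 words; the real helper also aligns the result, and the actual constants come from HotSpot:

```cpp
// Illustrative only: round a requested word count up to the minimum block size.
#include <algorithm>
#include <cstddef>
#include <cstdio>

static const size_t kBytesPerWord   = 8;   // assumption: LP64 word size
static const size_t kMetablockBytes = 24;  // assumption: 3-word Metablock header

static size_t get_allocation_word_size_sketch(size_t word_size) {
  size_t byte_size = word_size * kBytesPerWord;
  size_t raw_bytes = std::max(byte_size, kMetablockBytes); // clamp to minimum block
  return raw_bytes / kBytesPerWord;
}

int main() {
  printf("%zu -> %zu words\n", (size_t)1, get_allocation_word_size_sketch(1)); // 1 -> 3
  printf("%zu -> %zu words\n", (size_t)5, get_allocation_word_size_sketch(5)); // 5 -> 5
  return 0;
}
```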
@ -807,20 +875,45 @@ void VirtualSpaceNode::verify_container_count() {
// BlockFreelist methods // BlockFreelist methods
BlockFreelist::BlockFreelist() : _dictionary(new BlockTreeDictionary()) {} BlockFreelist::BlockFreelist() : _dictionary(new BlockTreeDictionary()), _small_blocks(NULL) {}
BlockFreelist::~BlockFreelist() { BlockFreelist::~BlockFreelist() {
delete _dictionary; delete _dictionary;
if (_small_blocks != NULL) {
delete _small_blocks;
}
} }
void BlockFreelist::return_block(MetaWord* p, size_t word_size) { void BlockFreelist::return_block(MetaWord* p, size_t word_size) {
assert(word_size >= SmallBlocks::small_block_min_size(), "never return dark matter");
Metablock* free_chunk = ::new (p) Metablock(word_size); Metablock* free_chunk = ::new (p) Metablock(word_size);
if (word_size < SmallBlocks::small_block_max_size()) {
small_blocks()->return_block(free_chunk, word_size);
} else {
dictionary()->return_chunk(free_chunk); dictionary()->return_chunk(free_chunk);
} }
log_trace(gc, metaspace, freelist, blocks)("returning block at " INTPTR_FORMAT " size = "
SIZE_FORMAT, p2i(free_chunk), word_size);
}
MetaWord* BlockFreelist::get_block(size_t word_size) { MetaWord* BlockFreelist::get_block(size_t word_size) {
if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) { assert(word_size >= SmallBlocks::small_block_min_size(), "never get dark matter");
// Dark matter. Too small for dictionary.
// Try small_blocks first.
if (word_size < SmallBlocks::small_block_max_size()) {
// Don't create small_blocks() until needed. small_blocks() allocates the small block list for
// this space manager.
MetaWord* new_block = (MetaWord*) small_blocks()->get_block(word_size);
if (new_block != NULL) {
log_trace(gc, metaspace, freelist, blocks)("getting block at " INTPTR_FORMAT " size = " SIZE_FORMAT,
p2i(new_block), word_size);
return new_block;
}
}
if (word_size < BlockFreelist::min_dictionary_size()) {
// If allocation in small blocks fails, this is Dark Matter. Too small for dictionary.
return NULL; return NULL;
} }
@ -839,15 +932,20 @@ MetaWord* BlockFreelist::get_block(size_t word_size) {
MetaWord* new_block = (MetaWord*)free_block; MetaWord* new_block = (MetaWord*)free_block;
assert(block_size >= word_size, "Incorrect size of block from freelist"); assert(block_size >= word_size, "Incorrect size of block from freelist");
const size_t unused = block_size - word_size; const size_t unused = block_size - word_size;
if (unused >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) { if (unused >= SmallBlocks::small_block_min_size()) {
return_block(new_block + word_size, unused); return_block(new_block + word_size, unused);
} }
log_trace(gc, metaspace, freelist, blocks)("getting block at " INTPTR_FORMAT " size = " SIZE_FORMAT,
p2i(new_block), word_size);
return new_block; return new_block;
} }
void BlockFreelist::print_on(outputStream* st) const { void BlockFreelist::print_on(outputStream* st) const {
dictionary()->print_free_lists(st); dictionary()->print_free_lists(st);
if (_small_blocks != NULL) {
_small_blocks->print_on(st);
}
} }
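Taken together, the `return_block` and `get_block` changes above turn `BlockFreelist` into a two-tier allocator: exact-fit per-size lists for small blocks, a best-fit dictionary for everything larger, with the unused tail of a dictionary block recycled back into the free lists. A simplified standalone sketch of that policy (`std::multimap` stands in for HotSpot's tree dictionary, and the size limits are placeholders, not the real constants):

```cpp
#include <cstddef>
#include <cstdio>
#include <map>
#include <vector>

static const size_t kMinWords = 3;   // placeholder: smallest trackable block ("dark matter" below this)
static const size_t kSmallMax = 12;  // placeholder: upper bound for the small-block lists

struct FreeBlocks {
  std::vector<std::vector<size_t>> small_lists;   // index = size - kMinWords, holds block addresses
  std::multimap<size_t, size_t> dictionary;       // block size -> block address

  FreeBlocks() : small_lists(kSmallMax - kMinWords) {}

  void return_block(size_t addr, size_t words) {
    if (words < kMinWords) return;                                     // too small to track
    if (words < kSmallMax) small_lists[words - kMinWords].push_back(addr);
    else                   dictionary.emplace(words, addr);
  }

  bool get_block(size_t words, size_t* addr) {
    if (words >= kMinWords && words < kSmallMax &&
        !small_lists[words - kMinWords].empty()) {                     // exact fit from small lists
      *addr = small_lists[words - kMinWords].back();
      small_lists[words - kMinWords].pop_back();
      return true;
    }
    std::multimap<size_t, size_t>::iterator it = dictionary.lower_bound(words); // best fit
    if (it == dictionary.end()) return false;
    size_t block_words = it->first;
    size_t block_addr  = it->second;
    dictionary.erase(it);
    size_t unused = block_words - words;
    if (unused >= kMinWords) return_block(block_addr + words, unused);  // recycle the tail
    *addr = block_addr;
    return true;
  }
};

int main() {
  FreeBlocks fl;
  fl.return_block(1000, 40);                        // one large free block
  size_t a = 0;
  if (fl.get_block(5, &a))  printf("5 words  -> block at %zu\n", a);   // split from the dictionary
  if (fl.get_block(30, &a)) printf("30 words -> block at %zu\n", a);   // served from the recycled tail
  return 0;
}
```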
// VirtualSpaceNode methods // VirtualSpaceNode methods
@ -2075,6 +2173,7 @@ SpaceManager::SpaceManager(Metaspace::MetadataType mdtype,
_allocated_blocks_words(0), _allocated_blocks_words(0),
_allocated_chunks_words(0), _allocated_chunks_words(0),
_allocated_chunks_count(0), _allocated_chunks_count(0),
_block_freelists(NULL),
_lock(lock) _lock(lock)
{ {
initialize(); initialize();
@ -2164,8 +2263,10 @@ SpaceManager::~SpaceManager() {
log.trace("~SpaceManager(): " PTR_FORMAT, p2i(this)); log.trace("~SpaceManager(): " PTR_FORMAT, p2i(this));
ResourceMark rm; ResourceMark rm;
locked_print_chunks_in_use_on(log.trace_stream()); locked_print_chunks_in_use_on(log.trace_stream());
if (block_freelists() != NULL) {
block_freelists()->print_on(log.trace_stream()); block_freelists()->print_on(log.trace_stream());
} }
}
// Have to update before the chunks_in_use lists are emptied // Have to update before the chunks_in_use lists are emptied
// below. // below.
@ -2215,6 +2316,10 @@ SpaceManager::~SpaceManager() {
} }
log.trace("updated dictionary count " SIZE_FORMAT " %s", chunk_manager()->humongous_dictionary()->total_count(), chunk_size_name(HumongousIndex)); log.trace("updated dictionary count " SIZE_FORMAT " %s", chunk_manager()->humongous_dictionary()->total_count(), chunk_size_name(HumongousIndex));
chunk_manager()->slow_locked_verify(); chunk_manager()->slow_locked_verify();
if (_block_freelists != NULL) {
delete _block_freelists;
}
} }
const char* SpaceManager::chunk_size_name(ChunkIndex index) const { const char* SpaceManager::chunk_size_name(ChunkIndex index) const {
@ -2253,10 +2358,12 @@ ChunkIndex ChunkManager::list_index(size_t size) {
void SpaceManager::deallocate(MetaWord* p, size_t word_size) { void SpaceManager::deallocate(MetaWord* p, size_t word_size) {
assert_lock_strong(_lock); assert_lock_strong(_lock);
size_t raw_word_size = get_raw_word_size(word_size); // Allocations and deallocations are in raw_word_size
size_t min_size = TreeChunk<Metablock, FreeList<Metablock> >::min_size(); size_t raw_word_size = get_allocation_word_size(word_size);
assert(raw_word_size >= min_size, // Lazily create a block_freelist
"Should not deallocate dark matter " SIZE_FORMAT "<" SIZE_FORMAT, word_size, min_size); if (block_freelists() == NULL) {
_block_freelists = new BlockFreelist();
}
block_freelists()->return_block(p, raw_word_size); block_freelists()->return_block(p, raw_word_size);
} }
@ -2312,8 +2419,9 @@ void SpaceManager::add_chunk(Metachunk* new_chunk, bool make_current) {
void SpaceManager::retire_current_chunk() { void SpaceManager::retire_current_chunk() {
if (current_chunk() != NULL) { if (current_chunk() != NULL) {
size_t remaining_words = current_chunk()->free_word_size(); size_t remaining_words = current_chunk()->free_word_size();
if (remaining_words >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) { if (remaining_words >= BlockFreelist::min_dictionary_size()) {
block_freelists()->return_block(current_chunk()->allocate(remaining_words), remaining_words); MetaWord* ptr = current_chunk()->allocate(remaining_words);
deallocate(ptr, remaining_words);
inc_used_metrics(remaining_words); inc_used_metrics(remaining_words);
} }
} }
@ -2350,7 +2458,7 @@ Metachunk* SpaceManager::get_new_chunk(size_t word_size,
* will be made to allocate a small chunk. * will be made to allocate a small chunk.
*/ */
MetaWord* SpaceManager::get_small_chunk_and_allocate(size_t word_size) { MetaWord* SpaceManager::get_small_chunk_and_allocate(size_t word_size) {
size_t raw_word_size = get_raw_word_size(word_size); size_t raw_word_size = get_allocation_word_size(word_size);
if (raw_word_size + Metachunk::overhead() > small_chunk_size()) { if (raw_word_size + Metachunk::overhead() > small_chunk_size()) {
return NULL; return NULL;
@ -2380,8 +2488,7 @@ MetaWord* SpaceManager::get_small_chunk_and_allocate(size_t word_size) {
MetaWord* SpaceManager::allocate(size_t word_size) { MetaWord* SpaceManager::allocate(size_t word_size) {
MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag); MutexLockerEx cl(lock(), Mutex::_no_safepoint_check_flag);
size_t raw_word_size = get_allocation_word_size(word_size);
size_t raw_word_size = get_raw_word_size(word_size);
BlockFreelist* fl = block_freelists(); BlockFreelist* fl = block_freelists();
MetaWord* p = NULL; MetaWord* p = NULL;
// Allocation from the dictionary is expensive in the sense that // Allocation from the dictionary is expensive in the sense that
@ -2389,7 +2496,7 @@ MetaWord* SpaceManager::allocate(size_t word_size) {
// from the dictionary until it starts to get fat. Is this // from the dictionary until it starts to get fat. Is this
// a reasonable policy? Maybe a skinny dictionary is fast enough // a reasonable policy? Maybe a skinny dictionary is fast enough
// for allocations. Do some profiling. JJJ // for allocations. Do some profiling. JJJ
if (fl->total_size() > allocation_from_dictionary_limit) { if (fl != NULL && fl->total_size() > allocation_from_dictionary_limit) {
p = fl->get_block(raw_word_size); p = fl->get_block(raw_word_size);
} }
if (p == NULL) { if (p == NULL) {
@ -2441,7 +2548,7 @@ void SpaceManager::verify() {
// If there are blocks in the dictionary, then // If there are blocks in the dictionary, then
// verification of chunks does not work since // verification of chunks does not work since
// being in the dictionary alters a chunk. // being in the dictionary alters a chunk.
if (block_freelists()->total_size() == 0) { if (block_freelists() != NULL && block_freelists()->total_size() == 0) {
for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) { for (ChunkIndex i = ZeroIndex; i < NumberOfInUseLists; i = next_chunk_index(i)) {
Metachunk* curr = chunks_in_use(i); Metachunk* curr = chunks_in_use(i);
while (curr != NULL) { while (curr != NULL) {
@ -2499,7 +2606,7 @@ void SpaceManager::dump(outputStream* const out) const {
} }
if (log_is_enabled(Trace, gc, metaspace, freelist)) { if (log_is_enabled(Trace, gc, metaspace, freelist)) {
block_freelists()->print_on(out); if (block_freelists() != NULL) block_freelists()->print_on(out);
} }
size_t free = current_chunk() == NULL ? 0 : current_chunk()->free_word_size(); size_t free = current_chunk() == NULL ? 0 : current_chunk()->free_word_size();
@ -3410,18 +3517,11 @@ void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
|| Thread::current()->is_VM_thread(), "should be the VM thread"); || Thread::current()->is_VM_thread(), "should be the VM thread");
if (DumpSharedSpaces && PrintSharedSpaces) { if (DumpSharedSpaces && PrintSharedSpaces) {
record_deallocation(ptr, vsm()->get_raw_word_size(word_size)); record_deallocation(ptr, vsm()->get_allocation_word_size(word_size));
} }
MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag); MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
// Dark matter. Too small for dictionary.
#ifdef ASSERT
Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
#endif
return;
}
if (is_class && using_class_space()) { if (is_class && using_class_space()) {
class_vsm()->deallocate(ptr, word_size); class_vsm()->deallocate(ptr, word_size);
} else { } else {
@ -3451,7 +3551,7 @@ MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
report_out_of_shared_space(read_only ? SharedReadOnly : SharedReadWrite); report_out_of_shared_space(read_only ? SharedReadOnly : SharedReadWrite);
} }
if (PrintSharedSpaces) { if (PrintSharedSpaces) {
space->record_allocation(result, type, space->vsm()->get_raw_word_size(word_size)); space->record_allocation(result, type, space->vsm()->get_allocation_word_size(word_size));
} }
// Zero initialize. // Zero initialize.
@ -3509,10 +3609,11 @@ void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_s
// If result is still null, we are out of memory. // If result is still null, we are out of memory.
Log(gc, metaspace, freelist) log; Log(gc, metaspace, freelist) log;
if (log.is_trace()) { if (log.is_info()) {
log.trace("Metaspace allocation failed for size " SIZE_FORMAT, word_size); log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
is_class_space_allocation(mdtype) ? "class" : "data", word_size);
ResourceMark rm; ResourceMark rm;
outputStream* out = log.trace_stream(); outputStream* out = log.info_stream();
if (loader_data->metaspace_or_null() != NULL) { if (loader_data->metaspace_or_null() != NULL) {
loader_data->dump(out); loader_data->dump(out);
} }


@ -368,23 +368,36 @@ AnnotationArray** ConstMethod::default_annotations_addr() const {
return (AnnotationArray**)constMethod_end() - offset; return (AnnotationArray**)constMethod_end() - offset;
} }
Array<u1>* copy_annotations(ClassLoaderData* loader_data, AnnotationArray* from, TRAPS) {
int length = from->length();
Array<u1>* a = MetadataFactory::new_array<u1>(loader_data, length, 0, CHECK_NULL);
memcpy((void*)a->adr_at(0), (void*)from->adr_at(0), length);
return a;
}
// copy annotations from 'cm' to 'this' // copy annotations from 'cm' to 'this'
void ConstMethod::copy_annotations_from(ConstMethod* cm) { // Must make copy because these are deallocated with their constMethod, if redefined.
void ConstMethod::copy_annotations_from(ClassLoaderData* loader_data, ConstMethod* cm, TRAPS) {
Array<u1>* a;
if (cm->has_method_annotations()) { if (cm->has_method_annotations()) {
assert(has_method_annotations(), "should be allocated already"); assert(has_method_annotations(), "should be allocated already");
set_method_annotations(cm->method_annotations()); a = copy_annotations(loader_data, cm->method_annotations(), CHECK);
set_method_annotations(a);
} }
if (cm->has_parameter_annotations()) { if (cm->has_parameter_annotations()) {
assert(has_parameter_annotations(), "should be allocated already"); assert(has_parameter_annotations(), "should be allocated already");
set_parameter_annotations(cm->parameter_annotations()); a = copy_annotations(loader_data, cm->parameter_annotations(), CHECK);
set_parameter_annotations(a);
} }
if (cm->has_type_annotations()) { if (cm->has_type_annotations()) {
assert(has_type_annotations(), "should be allocated already"); assert(has_type_annotations(), "should be allocated already");
set_type_annotations(cm->type_annotations()); a = copy_annotations(loader_data, cm->type_annotations(), CHECK);
set_type_annotations(a);
} }
if (cm->has_default_annotations()) { if (cm->has_default_annotations()) {
assert(has_default_annotations(), "should be allocated already"); assert(has_default_annotations(), "should be allocated already");
set_default_annotations(cm->default_annotations()); a = copy_annotations(loader_data, cm->default_annotations(), CHECK);
set_default_annotations(a);
} }
} }
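The new `copy_annotations` helper above deep-copies each annotation array into the target class loader data instead of aliasing the old method's arrays; as the added comment notes, those arrays are deallocated together with their `ConstMethod` when a class is redefined. A toy standalone sketch (plain C++, not HotSpot metadata) of the dangling-pointer hazard the copy avoids:

```cpp
// If the new method merely aliased the old method's annotation storage,
// freeing the old method (as redefinition does) would leave a dangling
// pointer. A deep copy gives each holder its own independent storage.
#include <cstdio>
#include <cstring>

struct Annotations { size_t len; unsigned char* data; };

static Annotations deep_copy(const Annotations& from) {
  Annotations a;
  a.len  = from.len;
  a.data = new unsigned char[from.len];
  std::memcpy(a.data, from.data, from.len);   // own copy, independent lifetime
  return a;
}

int main() {
  Annotations old_m = { 4, new unsigned char[4]{1, 2, 3, 4} };
  Annotations new_m = deep_copy(old_m);       // safe: survives the delete below
  delete[] old_m.data;                        // "redefinition" frees the old storage
  printf("%d\n", (int)new_m.data[0]);         // still valid
  delete[] new_m.data;
  return 0;
}
```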


@ -469,7 +469,7 @@ public:
} }
// Copy annotations from other ConstMethod // Copy annotations from other ConstMethod
void copy_annotations_from(ConstMethod* cm); void copy_annotations_from(ClassLoaderData* loader_data, ConstMethod* cm, TRAPS);
// byte codes // byte codes
void set_code(address code) { void set_code(address code) {


@ -674,20 +674,20 @@ void InstanceKlass::link_methods(TRAPS) {
// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access) // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
void InstanceKlass::initialize_super_interfaces(instanceKlassHandle this_k, TRAPS) { void InstanceKlass::initialize_super_interfaces(instanceKlassHandle this_k, TRAPS) {
assert (this_k->has_default_methods(), "caller should have checked this"); assert (this_k->has_nonstatic_concrete_methods(), "caller should have checked this");
for (int i = 0; i < this_k->local_interfaces()->length(); ++i) { for (int i = 0; i < this_k->local_interfaces()->length(); ++i) {
Klass* iface = this_k->local_interfaces()->at(i); Klass* iface = this_k->local_interfaces()->at(i);
InstanceKlass* ik = InstanceKlass::cast(iface); InstanceKlass* ik = InstanceKlass::cast(iface);
// Initialization is a depth-first search, i.e. we start with the top of the inheritance tree // Initialization is a depth-first search, i.e. we start with the top of the inheritance tree
// has_default_methods drives searching superinterfaces since it // has_nonstatic_concrete_methods drives searching superinterfaces since it
// means has_default_methods in its superinterface hierarchy // means has_nonstatic_concrete_methods in its superinterface hierarchy
if (ik->has_default_methods()) { if (ik->has_nonstatic_concrete_methods()) {
ik->initialize_super_interfaces(ik, CHECK); ik->initialize_super_interfaces(ik, CHECK);
} }
// Only initialize() interfaces that "declare" concrete methods. // Only initialize() interfaces that "declare" concrete methods.
if (ik->should_be_initialized() && ik->declares_default_methods()) { if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
ik->initialize(CHECK); ik->initialize(CHECK);
} }
} }
@ -761,11 +761,11 @@ void InstanceKlass::initialize_impl(instanceKlassHandle this_k, TRAPS) {
if (super_klass != NULL && super_klass->should_be_initialized()) { if (super_klass != NULL && super_klass->should_be_initialized()) {
super_klass->initialize(THREAD); super_klass->initialize(THREAD);
} }
// If C implements any interfaces that declares a non-abstract, non-static method, // If C implements any interface that declares a non-static, concrete method,
// the initialization of C triggers initialization of its super interfaces. // the initialization of C triggers initialization of its super interfaces.
// Only need to recurse if has_default_methods which includes declaring and // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
// inheriting default methods // having a superinterface that declares, non-static, concrete methods
if (!HAS_PENDING_EXCEPTION && this_k->has_default_methods()) { if (!HAS_PENDING_EXCEPTION && this_k->has_nonstatic_concrete_methods()) {
this_k->initialize_super_interfaces(this_k, THREAD); this_k->initialize_super_interfaces(this_k, THREAD);
} }
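The renaming above (`has_default_methods` → `has_nonstatic_concrete_methods`) keeps the rule the comments describe: recursion into superinterfaces is driven by "some interface in the hierarchy declares a non-static concrete method", while `initialize()` runs only on interfaces that declare one themselves. A plain C++ sketch of that traversal, using a hypothetical `Iface` type rather than HotSpot's `InstanceKlass`:

```cpp
// Illustrative only: depth-first walk of superinterfaces, initializing only
// those that themselves declare a non-static concrete (default) method.
#include <cstdio>
#include <string>
#include <vector>

struct Iface {
  std::string name;
  bool declares_concrete;          // declares a non-static concrete method itself
  bool has_concrete_in_hierarchy;  // it or a superinterface declares one
  std::vector<Iface*> supers;
  bool initialized;

  void initialize_super_interfaces() {
    for (size_t i = 0; i < supers.size(); i++) {
      Iface* s = supers[i];
      if (s->has_concrete_in_hierarchy) s->initialize_super_interfaces();  // depth-first
      if (!s->initialized && s->declares_concrete) {
        s->initialized = true;                                             // stand-in for initialize()
        printf("initialized %s\n", s->name.c_str());
      }
    }
  }
};

int main() {
  Iface top    = { "Top",    true,  true,  {},          false };
  Iface marker = { "Marker", false, true,  { &top },    false };  // inherits but declares nothing
  Iface c      = { "C",      false, true,  { &marker }, false };
  c.initialize_super_interfaces();  // only Top gets initialized
  return 0;
}
```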

Some files were not shown because too many files have changed in this diff