Merge

commit 640bddea5e

.hgtags
@ -380,3 +380,4 @@ a71210c0d9800eb6925b61ecd6198abd554f90ee jdk-9+134
e384420383a5b79fa0012ebcb25d8f83cff7f777 jdk-9+135
1b4b5d01aa11edf24b6fadbe3d2f3e411e3b02cd jdk-9+136
9cb87c88ed851c0575b8ead753ea238ed5b544e9 jdk-9+137
d273dfe9a126d3bffe92072547fef2cd1361b0eb jdk-9+138
@ -33,6 +33,7 @@ include @SPEC@
CC := @BUILD_CC@
CXX := @BUILD_CXX@
LD := @BUILD_LD@
LDCXX := @BUILD_LDCXX@
AS := @BUILD_AS@
NM := @BUILD_NM@
AR := @BUILD_AR@
@ -687,7 +687,6 @@ XMKMF
MSVCP_DLL
MSVCR_DLL
LIBCXX
STATIC_CXX_SETTING
FIXPATH_DETACH_FLAG
FIXPATH
BUILD_GTEST
@ -5092,7 +5091,7 @@ VS_SDK_PLATFORM_NAME_2013=
#CUSTOM_AUTOCONF_INCLUDE

# Do not change or remove the following line, it is needed for consistency checks:
DATE_WHEN_GENERATED=1474894604
DATE_WHEN_GENERATED=1475218974

###############################################################################
#
@ -53087,49 +53086,10 @@ fi


if test "x$OPENJDK_TARGET_OS" = xlinux; then
# Test if -lstdc++ works.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if dynamic link of stdc++ is possible" >&5
$as_echo_n "checking if dynamic link of stdc++ is possible... " >&6; }
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu

OLD_CXXFLAGS="$CXXFLAGS"
CXXFLAGS="$CXXFLAGS -lstdc++"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */

int
main ()
{
return 0;
;
return 0;
}
_ACEOF
if ac_fn_cxx_try_link "$LINENO"; then :
has_dynamic_libstdcxx=yes
else
has_dynamic_libstdcxx=no
fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
CXXFLAGS="$OLD_CXXFLAGS"
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu

{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $has_dynamic_libstdcxx" >&5
$as_echo "$has_dynamic_libstdcxx" >&6; }

# Test if stdc++ can be linked statically.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if static link of stdc++ is possible" >&5
$as_echo_n "checking if static link of stdc++ is possible... " >&6; }
STATIC_STDCXX_FLAGS="-Wl,-Bstatic -lstdc++ -lgcc -Wl,-Bdynamic"
STATIC_STDCXX_FLAGS="-static-libstdc++ -static-libgcc"
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
@ -53137,9 +53097,7 @@ ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ex
ac_compiler_gnu=$ac_cv_cxx_compiler_gnu

OLD_LIBS="$LIBS"
OLD_CXX="$CXX"
LIBS="$STATIC_STDCXX_FLAGS"
CXX="$CC"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */

@ -53159,7 +53117,6 @@ fi
rm -f core conftest.err conftest.$ac_objext \
conftest$ac_exeext conftest.$ac_ext
LIBS="$OLD_LIBS"
CXX="$OLD_CXX"
ac_ext=cpp
ac_cpp='$CXXCPP $CPPFLAGS'
ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
@ -53169,59 +53126,34 @@ ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $has_static_libstdcxx" >&5
$as_echo "$has_static_libstdcxx" >&6; }

if test "x$has_static_libstdcxx" = xno && test "x$has_dynamic_libstdcxx" = xno; then
as_fn_error $? "Cannot link to stdc++, neither dynamically nor statically!" "$LINENO" 5
fi

if test "x$with_stdc__lib" = xstatic && test "x$has_static_libstdcxx" = xno; then
as_fn_error $? "Static linking of libstdc++ was not possible!" "$LINENO" 5
fi

if test "x$with_stdc__lib" = xdynamic && test "x$has_dynamic_libstdcxx" = xno; then
as_fn_error $? "Dynamic linking of libstdc++ was not possible!" "$LINENO" 5
fi

# If dynamic was requested, it's available since it would fail above otherwise.
# If dynamic wasn't requested, go with static unless it isn't available.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libstdc++" >&5
$as_echo_n "checking how to link with libstdc++... " >&6; }
if test "x$with_stdc__lib" = xdynamic || test "x$has_static_libstdcxx" = xno || [[ " $JVM_VARIANTS " =~ " zeroshark " ]] ; then
LIBCXX="$LIBCXX -lstdc++"
# To help comparisons with old build, put stdc++ first in JVM_LIBS
JVM_LIBS="-lstdc++ $JVM_LIBS"
# Ideally, we should test stdc++ for the BUILD toolchain separately. For now
# just use the same setting as for the TARGET toolchain.
OPENJDK_BUILD_JVM_LIBS="-lstdc++ $OPENJDK_BUILD_JVM_LIBS"
LDCXX="$CXX"
STATIC_CXX_SETTING="STATIC_CXX=false"
if test "x$with_stdc__lib" = xdynamic || test "x$has_static_libstdcxx" = xno \
|| [[ " $JVM_VARIANTS " =~ " zeroshark " ]] ; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: dynamic" >&5
$as_echo "dynamic" >&6; }
else
LIBCXX="$LIBCXX $STATIC_STDCXX_FLAGS"
JVM_LDFLAGS="$JVM_LDFLAGS -static-libgcc"
# To help comparisons with old build, put stdc++ first in JVM_LIBS
JVM_LIBS="-Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $JVM_LIBS"
JVM_LDFLAGS="$JVM_LDFLAGS $STATIC_STDCXX_FLAGS"
# Ideally, we should test stdc++ for the BUILD toolchain separately. For now
# just use the same setting as for the TARGET toolchain.
OPENJDK_BUILD_JVM_LDFLAGS="$OPENJDK_BUILD_JVM_LDFLAGS -static-libgcc"
OPENJDK_BUILD_JVM_LIBS="-Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $OPENJDK_BUILD_JVM_LIBS"
LDCXX="$CC"
STATIC_CXX_SETTING="STATIC_CXX=true"
OPENJDK_BUILD_JVM_LDFLAGS="$OPENJDK_BUILD_JVM_LDFLAGS $STATIC_STDCXX_FLAGS"
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: static" >&5
$as_echo "static" >&6; }
fi
fi


# libCrun is the c++ runtime-library with SunStudio (roughly the equivalent of gcc's libstdc++.so)
if test "x$TOOLCHAIN_TYPE" = xsolstudio && test "x$LIBCXX" = x; then
LIBCXX="${SYSROOT}/usr/lib${OPENJDK_TARGET_CPU_ISADIR}/libCrun.so.1"
fi

# TODO better (platform agnostic) test
if test "x$OPENJDK_TARGET_OS" = xmacosx && test "x$LIBCXX" = x && test "x$TOOLCHAIN_TYPE" = xgcc; then
LIBCXX="-lstdc++"
fi


# Setup Windows runtime dlls
@ -45,84 +45,44 @@ AC_DEFUN_ONCE([LIB_SETUP_STD_LIBS],
)

if test "x$OPENJDK_TARGET_OS" = xlinux; then
# Test if -lstdc++ works.
AC_MSG_CHECKING([if dynamic link of stdc++ is possible])
AC_LANG_PUSH(C++)
OLD_CXXFLAGS="$CXXFLAGS"
CXXFLAGS="$CXXFLAGS -lstdc++"
AC_LINK_IFELSE([AC_LANG_PROGRAM([], [return 0;])],
[has_dynamic_libstdcxx=yes],
[has_dynamic_libstdcxx=no])
CXXFLAGS="$OLD_CXXFLAGS"
AC_LANG_POP(C++)
AC_MSG_RESULT([$has_dynamic_libstdcxx])

# Test if stdc++ can be linked statically.
AC_MSG_CHECKING([if static link of stdc++ is possible])
STATIC_STDCXX_FLAGS="-Wl,-Bstatic -lstdc++ -lgcc -Wl,-Bdynamic"
STATIC_STDCXX_FLAGS="-static-libstdc++ -static-libgcc"
AC_LANG_PUSH(C++)
OLD_LIBS="$LIBS"
OLD_CXX="$CXX"
LIBS="$STATIC_STDCXX_FLAGS"
CXX="$CC"
AC_LINK_IFELSE([AC_LANG_PROGRAM([], [return 0;])],
[has_static_libstdcxx=yes],
[has_static_libstdcxx=no])
LIBS="$OLD_LIBS"
CXX="$OLD_CXX"
AC_LANG_POP(C++)
AC_MSG_RESULT([$has_static_libstdcxx])

if test "x$has_static_libstdcxx" = xno && test "x$has_dynamic_libstdcxx" = xno; then
AC_MSG_ERROR([Cannot link to stdc++, neither dynamically nor statically!])
fi

if test "x$with_stdc__lib" = xstatic && test "x$has_static_libstdcxx" = xno; then
AC_MSG_ERROR([Static linking of libstdc++ was not possible!])
fi

if test "x$with_stdc__lib" = xdynamic && test "x$has_dynamic_libstdcxx" = xno; then
AC_MSG_ERROR([Dynamic linking of libstdc++ was not possible!])
fi

# If dynamic was requested, it's available since it would fail above otherwise.
# If dynamic wasn't requested, go with static unless it isn't available.
AC_MSG_CHECKING([how to link with libstdc++])
if test "x$with_stdc__lib" = xdynamic || test "x$has_static_libstdcxx" = xno || HOTSPOT_CHECK_JVM_VARIANT(zeroshark); then
LIBCXX="$LIBCXX -lstdc++"
# To help comparisons with old build, put stdc++ first in JVM_LIBS
JVM_LIBS="-lstdc++ $JVM_LIBS"
# Ideally, we should test stdc++ for the BUILD toolchain separately. For now
# just use the same setting as for the TARGET toolchain.
OPENJDK_BUILD_JVM_LIBS="-lstdc++ $OPENJDK_BUILD_JVM_LIBS"
LDCXX="$CXX"
STATIC_CXX_SETTING="STATIC_CXX=false"
if test "x$with_stdc__lib" = xdynamic || test "x$has_static_libstdcxx" = xno \
|| HOTSPOT_CHECK_JVM_VARIANT(zeroshark); then
AC_MSG_RESULT([dynamic])
else
LIBCXX="$LIBCXX $STATIC_STDCXX_FLAGS"
JVM_LDFLAGS="$JVM_LDFLAGS -static-libgcc"
# To help comparisons with old build, put stdc++ first in JVM_LIBS
JVM_LIBS="-Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $JVM_LIBS"
JVM_LDFLAGS="$JVM_LDFLAGS $STATIC_STDCXX_FLAGS"
# Ideally, we should test stdc++ for the BUILD toolchain separately. For now
# just use the same setting as for the TARGET toolchain.
OPENJDK_BUILD_JVM_LDFLAGS="$OPENJDK_BUILD_JVM_LDFLAGS -static-libgcc"
OPENJDK_BUILD_JVM_LIBS="-Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $OPENJDK_BUILD_JVM_LIBS"
LDCXX="$CC"
STATIC_CXX_SETTING="STATIC_CXX=true"
OPENJDK_BUILD_JVM_LDFLAGS="$OPENJDK_BUILD_JVM_LDFLAGS $STATIC_STDCXX_FLAGS"
AC_MSG_RESULT([static])
fi
fi
AC_SUBST(STATIC_CXX_SETTING)

# libCrun is the c++ runtime-library with SunStudio (roughly the equivalent of gcc's libstdc++.so)
if test "x$TOOLCHAIN_TYPE" = xsolstudio && test "x$LIBCXX" = x; then
LIBCXX="${SYSROOT}/usr/lib${OPENJDK_TARGET_CPU_ISADIR}/libCrun.so.1"
fi

# TODO better (platform agnostic) test
if test "x$OPENJDK_TARGET_OS" = xmacosx && test "x$LIBCXX" = x && test "x$TOOLCHAIN_TYPE" = xgcc; then
LIBCXX="-lstdc++"
fi
AC_SUBST(LIBCXX)

# Setup Windows runtime dlls
@ -654,10 +654,10 @@ compare_bin_file() {
OTHER_DIZ_FILE="$OTHER/support/native/java.base/java_objs/java.diz"
elif [ "$NAME" = "jimage.exe" ] \
&& [ -f "$OTHER/support/native/jdk.jlink/jimage_objs/jimage.diz" ]; then
OTHER_DIZ_FILE="$OTHER/support/native/jdk.jlink/jimage_objs/jimage.diz"
OTHER_DIZ_FILE="$OTHER/support/modules_cmds/jdk.jlink/jimage.diz"
elif [ "$NAME" = "javacpl.exe" ] \
&& [ -f "$OTHER/support/native/jdk.plugin/javacpl/javacpl.diz" ]; then
OTHER_DIZ_FILE="$OTHER/support/native/jdk.plugin/javacpl/javacpl.diz"
OTHER_DIZ_FILE="$OTHER/support/modules_cmds/jdk.deploy.controlpanel/javacpl.diz"
elif [ -f "${OTHER_FILE_BASE}.diz" ]; then
OTHER_DIZ_FILE=${OTHER_FILE_BASE}.diz
else
@ -686,10 +686,10 @@ compare_bin_file() {
THIS_DIZ_FILE="$THIS/support/native/java.base/java_objs/java.diz"
elif [ "$NAME" = "jimage.exe" ] \
&& [ -f "$THIS/support/native/jdk.jlink/jimage_objs/jimage.diz" ]; then
THIS_DIZ_FILE="$THIS/support/native/jdk.jlink/jimage_objs/jimage.diz"
THIS_DIZ_FILE="$THIS/support/modules_cmds/jdk.jlink/jimage.diz"
elif [ "$NAME" = "javacpl.exe" ] \
&& [ -f "$THIS/support/native/jdk.plugin/javacpl/javacpl.diz" ]; then
THIS_DIZ_FILE="$THIS/support/native/jdk.plugin/javacpl/javacpl.diz"
THIS_DIZ_FILE="$THIS/support/modules_cmds/jdk.deploy.controlpanel/javacpl.diz"
elif [ -f "${THIS_FILE_BASE}.diz" ]; then
THIS_DIZ_FILE=${THIS_FILE/.dll/}.diz
else
@ -380,3 +380,4 @@ f7e1d5337c2e550fe553df7a3886bbed80292ecd jdk-9+131
094d0db606db976045f594dba47d4593b715cc81 jdk-9+135
aa053a3faf266c12b4fd5272da431a3e08e4a3e3 jdk-9+136
258cf18fa7fc59359b874f8743b7168dc48baf73 jdk-9+137
27bb44be32076861a0951bcefb07a1d92509a4b6 jdk-9+138
@ -38,7 +38,7 @@ $(eval $(call SetupJavaCompilation,BUILD_TOOLS_CORBA, \
SRC := $(CORBA_TOPDIR)/make/src/classes, \
BIN := $(BUILDTOOLS_OUTPUTDIR)/corba_tools_classes))

TOOL_LOGUTIL_CMD := $(JAVA) -cp $(BUILDTOOLS_OUTPUTDIR)/corba_tools_classes \
TOOL_LOGUTIL_CMD := $(JAVA_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/corba_tools_classes \
build.tools.logutil.MC

$(eval $(call SetupJavaCompilation,BUILD_IDLJ, \
@ -50,7 +50,7 @@ $(eval $(call SetupJavaCompilation,BUILD_IDLJ, \
EXCLUDE_FILES := ResourceBundleUtil.java module-info.java))

# Force the language to english for predictable source code generation.
TOOL_IDLJ_CMD := $(JAVA) -cp $(BUILDTOOLS_OUTPUTDIR)/idlj_classes \
TOOL_IDLJ_CMD := $(JAVA_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/idlj_classes \
-Duser.language=en com.sun.tools.corba.se.idl.toJavaPortable.Compile

################################################################################
@ -540,3 +540,4 @@ b8b694c6b4d2ab0939aed7adaf0eec1ac321a085 jdk-9+134
3b1c4562953db47e36b237a500f368d5c9746d47 jdk-9+135
a20da289f646ee44440695b81abc0548330e4ca7 jdk-9+136
dfcbf839e299e7e2bba1da69bdb347617ea4c7e8 jdk-9+137
fc0956308c7a586267c5dd35dff74f773aa9c3eb jdk-9+138
@ -55,7 +55,7 @@ endif
# Disabling switch warning for clang because of test source.

$(eval $(call SetupNativeCompilation, BUILD_GTEST_LIBJVM, \
TOOLCHAIN := $(JVM_TOOLCHAIN), \
TOOLCHAIN := TOOLCHAIN_LINK_CXX, \
LIBRARY := jvm, \
OUTPUT_DIR := $(JVM_OUTPUTDIR)/gtest, \
OBJECT_DIR := $(JVM_OUTPUTDIR)/gtest/objs, \
@ -95,7 +95,7 @@ TARGETS += $(BUILD_GTEST_LIBJVM)
################################################################################

$(eval $(call SetupNativeCompilation, BUILD_GTEST_LAUNCHER, \
TOOLCHAIN := $(JVM_TOOLCHAIN), \
TOOLCHAIN := TOOLCHAIN_LINK_CXX, \
PROGRAM := gtestLauncher, \
OUTPUT_DIR := $(JVM_OUTPUTDIR)/gtest, \
EXTRA_FILES := $(GTEST_LAUNCHER_SRC), \
@ -143,13 +143,6 @@ ifneq ($(filter $(OPENJDK_TARGET_OS), linux macosx windows), )
JVM_PRECOMPILED_HEADER := $(HOTSPOT_TOPDIR)/src/share/vm/precompiled/precompiled.hpp
endif

ifneq ($(filter $(OPENJDK_TARGET_OS), macosx aix solaris), )
# On macosx, aix and solaris we have to link with the C++ compiler
JVM_TOOLCHAIN := TOOLCHAIN_LINK_CXX
else
JVM_TOOLCHAIN := TOOLCHAIN_DEFAULT
endif

ifeq ($(OPENJDK_TARGET_CPU), x86)
JVM_EXCLUDE_PATTERNS += x86_64
else ifeq ($(OPENJDK_TARGET_CPU), x86_64)
@ -194,7 +187,7 @@ JVM_OPTIMIZATION ?= HIGHEST_JVM
# Now set up the actual compilation of the main hotspot native library

$(eval $(call SetupNativeCompilation, BUILD_LIBJVM, \
TOOLCHAIN := $(JVM_TOOLCHAIN), \
TOOLCHAIN := TOOLCHAIN_LINK_CXX, \
LIBRARY := jvm, \
OUTPUT_DIR := $(JVM_OUTPUTDIR), \
SRC := $(JVM_SRC_DIRS), \
@ -68,6 +68,7 @@ public class InstanceKlass extends Klass {
Type type = db.lookupType("InstanceKlass");
arrayKlasses = new MetadataField(type.getAddressField("_array_klasses"), 0);
methods = type.getAddressField("_methods");
defaultMethods = type.getAddressField("_default_methods");
methodOrdering = type.getAddressField("_method_ordering");
localInterfaces = type.getAddressField("_local_interfaces");
transitiveInterfaces = type.getAddressField("_transitive_interfaces");
@ -128,6 +129,7 @@ public class InstanceKlass extends Klass {

private static MetadataField arrayKlasses;
private static AddressField methods;
private static AddressField defaultMethods;
private static AddressField methodOrdering;
private static AddressField localInterfaces;
private static AddressField transitiveInterfaces;
@ -335,6 +337,20 @@ public class InstanceKlass extends Klass {
// Accessors for declared fields
public Klass getArrayKlasses() { return (Klass) arrayKlasses.getValue(this); }
public MethodArray getMethods() { return new MethodArray(methods.getValue(getAddress())); }

public MethodArray getDefaultMethods() {
if (defaultMethods != null) {
Address addr = defaultMethods.getValue(getAddress());
if ((addr != null) && (addr.getAddressAt(0) != null)) {
return new MethodArray(addr);
} else {
return null;
}
} else {
return null;
}
}

public KlassArray getLocalInterfaces() { return new KlassArray(localInterfaces.getValue(getAddress())); }
public KlassArray getTransitiveInterfaces() { return new KlassArray(transitiveInterfaces.getValue(getAddress())); }
public int getJavaFieldsCount() { return (int) javaFieldsCount.getValue(this); }
@ -1358,7 +1358,7 @@ ClassFileStream* ClassLoader::search_module_entries(const GrowableArray<ModuleCl
if (!Universe::is_module_initialized() &&
!ModuleEntryTable::javabase_defined() &&
mod_entry == NULL) {
mod_entry = ModuleEntryTable::javabase_module();
mod_entry = ModuleEntryTable::javabase_moduleEntry();
}

// The module must be a named module
@ -1708,7 +1708,7 @@ void ClassLoader::create_javabase() {
if (jb_module == NULL) {
vm_exit_during_initialization("Unable to create ModuleEntry for java.base");
}
ModuleEntryTable::set_javabase_module(jb_module);
ModuleEntryTable::set_javabase_moduleEntry(jb_module);
}
}
@ -773,6 +773,41 @@ void java_lang_Class::initialize_mirror_fields(KlassHandle k,
InstanceKlass::cast(k())->do_local_static_fields(&initialize_static_field, mirror, CHECK);
}

// Set the java.lang.reflect.Module module field in the java_lang_Class mirror
void java_lang_Class::set_mirror_module_field(KlassHandle k, Handle mirror, Handle module, TRAPS) {
if (module.is_null()) {
// During startup, the module may be NULL only if java.base has not been defined yet.
// Put the class on the fixup_module_list to patch later when the java.lang.reflect.Module
// for java.base is known.
assert(!Universe::is_module_initialized(), "Incorrect java.lang.reflect.Module pre module system initialization");
MutexLocker m1(Module_lock, THREAD);
// Keep list of classes needing java.base module fixup
if (!ModuleEntryTable::javabase_defined()) {
if (fixup_module_field_list() == NULL) {
GrowableArray<Klass*>* list =
new (ResourceObj::C_HEAP, mtModule) GrowableArray<Klass*>(500, true);
set_fixup_module_field_list(list);
}
k->class_loader_data()->inc_keep_alive();
fixup_module_field_list()->push(k());
} else {
// java.base was defined at some point between calling create_mirror()
// and obtaining the Module_lock, patch this particular class with java.base.
ModuleEntry *javabase_entry = ModuleEntryTable::javabase_moduleEntry();
assert(javabase_entry != NULL && javabase_entry->module() != NULL,
"Setting class module field, java.base should be defined");
Handle javabase_handle(THREAD, JNIHandles::resolve(javabase_entry->module()));
set_module(mirror(), javabase_handle());
}
} else {
assert(Universe::is_module_initialized() ||
(ModuleEntryTable::javabase_defined() &&
(module() == JNIHandles::resolve(ModuleEntryTable::javabase_moduleEntry()->module()))),
"Incorrect java.lang.reflect.Module specification while creating mirror");
set_module(mirror(), module());
}
}

void java_lang_Class::create_mirror(KlassHandle k, Handle class_loader,
Handle module, Handle protection_domain, TRAPS) {
assert(k->java_mirror() == NULL, "should only assign mirror once");
@ -835,25 +870,13 @@ void java_lang_Class::create_mirror(KlassHandle k, Handle class_loader,
set_class_loader(mirror(), class_loader());

// set the module field in the java_lang_Class instance
// This may be null during bootstrap but will get fixed up later on.
set_module(mirror(), module());
set_mirror_module_field(k, mirror, module, THREAD);

// Setup indirection from klass->mirror last
// after any exceptions can happen during allocations.
if (!k.is_null()) {
k->set_java_mirror(mirror());
}

// Keep list of classes needing java.base module fixup.
if (!ModuleEntryTable::javabase_defined()) {
if (fixup_module_field_list() == NULL) {
GrowableArray<Klass*>* list =
new (ResourceObj::C_HEAP, mtModule) GrowableArray<Klass*>(500, true);
set_fixup_module_field_list(list);
}
k->class_loader_data()->inc_keep_alive();
fixup_module_field_list()->push(k());
}
} else {
if (fixup_mirror_list() == NULL) {
GrowableArray<Klass*>* list =
@ -219,6 +219,7 @@ class java_lang_Class : AllStatic {
static void set_class_loader(oop java_class, oop class_loader);
static void set_component_mirror(oop java_class, oop comp_mirror);
static void initialize_mirror_fields(KlassHandle k, Handle mirror, Handle protection_domain, TRAPS);
static void set_mirror_module_field(KlassHandle K, Handle mirror, Handle module, TRAPS);
public:
static void compute_offsets();
@ -25,12 +25,85 @@
#include "precompiled.hpp"
#include "classfile/classFileParser.hpp"
#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/klassFactory.hpp"
#include "classfile/sharedClassUtil.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/resourceArea.hpp"
#include "prims/jvmtiEnvBase.hpp"
#include "prims/jvmtiRedefineClasses.hpp"
#include "trace/traceMacros.hpp"

// called during initial loading of a shared class
instanceKlassHandle KlassFactory::check_shared_class_file_load_hook(
instanceKlassHandle ik,
Symbol* class_name,
Handle class_loader,
Handle protection_domain, TRAPS) {
#if INCLUDE_CDS && INCLUDE_JVMTI
assert(ik.not_null(), "sanity");
assert(ik()->is_shared(), "expecting a shared class");

if (JvmtiExport::should_post_class_file_load_hook()) {
assert(THREAD->is_Java_thread(), "must be JavaThread");

// Post the CFLH
JvmtiCachedClassFileData* cached_class_file = NULL;
JvmtiCachedClassFileData* archived_class_data = ik->get_archived_class_data();
assert(archived_class_data != NULL, "shared class has no archived class data");
unsigned char* ptr =
VM_RedefineClasses::get_cached_class_file_bytes(archived_class_data);
unsigned char* end_ptr =
ptr + VM_RedefineClasses::get_cached_class_file_len(archived_class_data);
unsigned char* old_ptr = ptr;
JvmtiExport::post_class_file_load_hook(class_name,
class_loader,
protection_domain,
&ptr,
&end_ptr,
&cached_class_file);
if (old_ptr != ptr) {
// JVMTI agent has modified class file data.
// Set new class file stream using JVMTI agent modified class file data.
ClassLoaderData* loader_data =
ClassLoaderData::class_loader_data(class_loader());
int path_index = ik->shared_classpath_index();
SharedClassPathEntry* ent =
(SharedClassPathEntry*)FileMapInfo::shared_classpath(path_index);
ClassFileStream* stream = new ClassFileStream(ptr,
end_ptr - ptr,
ent->_name,
ClassFileStream::verify);
ClassFileParser parser(stream,
class_name,
loader_data,
protection_domain,
NULL,
NULL,
ClassFileParser::BROADCAST, // publicity level
CHECK_NULL);
instanceKlassHandle new_ik = parser.create_instance_klass(true /* changed_by_loadhook */,
CHECK_NULL);
if (cached_class_file != NULL) {
new_ik->set_cached_class_file(cached_class_file);
}

if (class_loader.is_null()) {
ResourceMark rm;
ClassLoader::add_package(class_name->as_C_string(), path_index, THREAD);
}

return new_ik;
}
}
#endif

return NULL;
}


static ClassFileStream* check_class_file_load_hook(ClassFileStream* stream,
Symbol* name,
ClassLoaderData* loader_data,
@ -97,7 +170,6 @@ instanceKlassHandle KlassFactory::create_from_stream(ClassFileStream* stream,
const InstanceKlass* host_klass,
GrowableArray<Handle>* cp_patches,
TRAPS) {

assert(stream != NULL, "invariant");
assert(loader_data != NULL, "invariant");
assert(THREAD->is_Java_thread(), "must be a JavaThread");
@ -142,5 +214,27 @@ instanceKlassHandle KlassFactory::create_from_stream(ClassFileStream* stream,

TRACE_KLASS_CREATION(result, parser, THREAD);

#if INCLUDE_CDS && INCLUDE_JVMTI
if (DumpSharedSpaces) {
assert(cached_class_file == NULL, "Sanity");
// Archive the class stream data into the optional data section
JvmtiCachedClassFileData *p;
int len;
const unsigned char *bytes;
// event based tracing might set cached_class_file
if ((bytes = result->get_cached_class_file_bytes()) != NULL) {
len = result->get_cached_class_file_len();
} else {
len = stream->length();
bytes = stream->buffer();
}
p = (JvmtiCachedClassFileData*)MetaspaceShared::optional_data_space_alloc(
offset_of(JvmtiCachedClassFileData, data) + len);
p->length = len;
memcpy(p->data, bytes, len);
result->set_archived_class_data(p);
}
#endif

return result;
}
@ -75,6 +75,12 @@ class KlassFactory : AllStatic {
const InstanceKlass* host_klass,
GrowableArray<Handle>* cp_patches,
TRAPS);
public:
static instanceKlassHandle check_shared_class_file_load_hook(
instanceKlassHandle ik,
Symbol* class_name,
Handle class_loader,
Handle protection_domain, TRAPS);
};

#endif // SHARE_VM_CLASSFILE_KLASSFACTORY_HPP
@ -92,7 +92,7 @@ bool ModuleEntry::can_read(ModuleEntry* m) const {
// read java.base. If either of these conditions
// hold, readability has been established.
if (!this->is_named() ||
(m == ModuleEntryTable::javabase_module())) {
(m == ModuleEntryTable::javabase_moduleEntry())) {
return true;
}

@ -358,16 +358,27 @@ void ModuleEntryTable::finalize_javabase(Handle module_handle, Symbol* version,
}

// Set java.lang.reflect.Module, version and location for java.base
ModuleEntry* jb_module = javabase_module();
ModuleEntry* jb_module = javabase_moduleEntry();
assert(jb_module != NULL, "java.base ModuleEntry not defined");
jb_module->set_module(boot_loader_data->add_handle(module_handle));
jb_module->set_version(version);
jb_module->set_location(location);
// Once java.base's ModuleEntry _module field is set with the known
// java.lang.reflect.Module, java.base is considered "defined" to the VM.
jb_module->set_module(boot_loader_data->add_handle(module_handle));

// Store pointer to the ModuleEntry for java.base in the java.lang.reflect.Module object.
java_lang_reflect_Module::set_module_entry(module_handle(), jb_module);

// Patch any previously loaded classes' module field with java.base's java.lang.reflect.Module.
patch_javabase_entries(module_handle);
}

// Within java.lang.Class instances there is a java.lang.reflect.Module field
// that must be set with the defining module. During startup, prior to java.base's
// definition, classes needing their module field set are added to the fixup_module_list.
// Their module field is set once java.base's java.lang.reflect.Module is known to the VM.
void ModuleEntryTable::patch_javabase_entries(Handle module_handle) {
assert(Module_lock->owned_by_self(), "should have the Module_lock");
if (module_handle.is_null()) {
fatal("Unable to patch the module field of classes loaded prior to java.base's definition, invalid java.lang.reflect.Module");
}
@ -389,9 +400,7 @@ void ModuleEntryTable::patch_javabase_entries(Handle module_handle) {
for (int i = 0; i < list_length; i++) {
Klass* k = list->at(i);
assert(k->is_klass(), "List should only hold classes");
Thread* THREAD = Thread::current();
KlassHandle kh(THREAD, k);
java_lang_Class::fixup_module_field(kh, module_handle);
java_lang_Class::fixup_module_field(KlassHandle(k), module_handle);
k->class_loader_data()->dec_keep_alive();
}
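Note (illustrative, not part of the commit): the moduleEntry.cpp and javaClasses.cpp changes above follow a deferred-fixup pattern — mirrors created before java.base is defined are queued under a lock, and their module fields are patched once java.base's java.lang.reflect.Module becomes known. A minimal standalone C++ sketch of that pattern, with hypothetical names and std::mutex standing in for HotSpot's Module_lock:

#include <mutex>
#include <vector>

// Hypothetical stand-ins for class mirrors and the java.base module object.
struct Mirror { const void* module = nullptr; };

class ModuleFixupList {
  std::mutex lock_;                 // plays the role of Module_lock
  std::vector<Mirror*> pending_;    // mirrors created before java.base is defined
  const void* javabase_ = nullptr;  // non-null once java.base is "defined"

public:
  // Called while creating a mirror: either set the field now or defer it.
  void set_module_or_defer(Mirror* m, const void* module) {
    std::lock_guard<std::mutex> g(lock_);
    if (module != nullptr) {
      m->module = module;            // normal case: defining module already known
    } else if (javabase_ != nullptr) {
      m->module = javabase_;         // java.base got defined in the meantime
    } else {
      pending_.push_back(m);         // defer until finalize_javabase()
    }
  }

  // Called once, when java.base's module object becomes available.
  void finalize_javabase(const void* javabase) {
    std::lock_guard<std::mutex> g(lock_);
    javabase_ = javabase;
    for (Mirror* m : pending_) {
      m->module = javabase_;         // patch everything queued so far
    }
    pending_.clear();
  }
};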
@ -78,11 +78,11 @@ public:
_must_walk_reads = false;
}

Symbol* name() const { return literal(); }
void set_name(Symbol* n) { set_literal(n); }
Symbol* name() const { return literal(); }
void set_name(Symbol* n) { set_literal(n); }

jobject module() const { return _module; }
void set_module(jobject j) { _module = j; }
jobject module() const { return _module; }
void set_module(jobject j) { _module = j; }

// The shared ProtectionDomain reference is set once the VM loads a shared class
// originated from the current Module. The referenced ProtectionDomain object is
@ -217,13 +217,13 @@ public:

// Special handling for unnamed module, one per class loader's ModuleEntryTable
void create_unnamed_module(ClassLoaderData* loader_data);
ModuleEntry* unnamed_module() { return _unnamed_module; }
ModuleEntry* unnamed_module() { return _unnamed_module; }

// Special handling for java.base
static ModuleEntry* javabase_module() { return _javabase_module; }
static void set_javabase_module(ModuleEntry* java_base) { _javabase_module = java_base; }
static bool javabase_defined() { return ((_javabase_module != NULL) &&
(_javabase_module->module() != NULL)); }
static ModuleEntry* javabase_moduleEntry() { return _javabase_module; }
static void set_javabase_moduleEntry(ModuleEntry* java_base) { _javabase_module = java_base; }
static bool javabase_defined() { return ((_javabase_module != NULL) &&
(_javabase_module->module() != NULL)); }
static void finalize_javabase(Handle module_handle, Symbol* version, Symbol* location);
static void patch_javabase_entries(Handle module_handle);
@ -206,7 +206,7 @@ static void define_javabase_module(jobject module, jstring version,
assert(pkg_list->length() == 0 || package_table != NULL, "Bad package_table");

// Ensure java.base's ModuleEntry has been created
assert(ModuleEntryTable::javabase_module() != NULL, "No ModuleEntry for java.base");
assert(ModuleEntryTable::javabase_moduleEntry() != NULL, "No ModuleEntry for java.base");

bool duplicate_javabase = false;
{
@ -226,7 +226,7 @@ static void define_javabase_module(jobject module, jstring version,
for (int x = 0; x < pkg_list->length(); x++) {
// Some of java.base's packages were added early in bootstrapping, ignore duplicates.
if (package_table->lookup_only(pkg_list->at(x)) == NULL) {
pkg = package_table->locked_create_entry_or_null(pkg_list->at(x), ModuleEntryTable::javabase_module());
pkg = package_table->locked_create_entry_or_null(pkg_list->at(x), ModuleEntryTable::javabase_moduleEntry());
assert(pkg != NULL, "Unable to create a java.base package entry");
}
// Unable to have a GrowableArray of TempNewSymbol. Must decrement the refcount of
@ -255,9 +255,6 @@ static void define_javabase_module(jobject module, jstring version,
log_trace(modules)("define_javabase_module(): creation of package %s for module java.base",
(pkg_list->at(x))->as_C_string());
}

// Patch any previously loaded classes' module field with java.base's jlr.Module.
ModuleEntryTable::patch_javabase_entries(module_handle);
}

void Modules::define_module(jobject module, jstring version,
@ -1210,16 +1210,12 @@ Klass* SystemDictionary::find_shared_class(Symbol* class_name) {

instanceKlassHandle SystemDictionary::load_shared_class(
Symbol* class_name, Handle class_loader, TRAPS) {
// Don't load shared class when JvmtiExport::should_post_class_file_load_hook()
// is enabled since posting CFLH is not supported when loading shared class.
if (!JvmtiExport::should_post_class_file_load_hook()) {
instanceKlassHandle ik (THREAD, find_shared_class(class_name));
// Make sure we only return the boot class for the NULL classloader.
if (ik.not_null() &&
ik->is_shared_boot_class() && class_loader.is_null()) {
Handle protection_domain;
return load_shared_class(ik, class_loader, protection_domain, THREAD);
}
instanceKlassHandle ik (THREAD, find_shared_class(class_name));
// Make sure we only return the boot class for the NULL classloader.
if (ik.not_null() &&
ik->is_shared_boot_class() && class_loader.is_null()) {
Handle protection_domain;
return load_shared_class(ik, class_loader, protection_domain, THREAD);
}
return instanceKlassHandle();
}
@ -1303,11 +1299,6 @@ instanceKlassHandle SystemDictionary::load_shared_class(instanceKlassHandle ik,
Handle class_loader,
Handle protection_domain, TRAPS) {
instanceKlassHandle nh = instanceKlassHandle(); // null Handle
if (JvmtiExport::should_post_class_file_load_hook()) {
// Don't load shared class when JvmtiExport::should_post_class_file_load_hook()
// is enabled since posting CFLH is not supported when loading shared class.
return nh;
}

if (ik.not_null()) {
Symbol* class_name = ik->name();
@ -1358,6 +1349,14 @@ instanceKlassHandle SystemDictionary::load_shared_class(instanceKlassHandle ik,
}
}

instanceKlassHandle new_ik = KlassFactory::check_shared_class_file_load_hook(
ik, class_name, class_loader, protection_domain, CHECK_(nh));
if (new_ik.not_null()) {
// The class is changed by CFLH. Return the new class. The shared class is
// not used.
return new_ik;
}

// Adjust methods to recover missing data. They need addresses for
// interpreter entry points and their default native method address
// must be reset.
@ -1366,22 +1366,25 @@ bool ParNewGeneration::take_from_overflow_list_work(ParScanThreadState* par_scan
return false;
}
assert(prefix != NULL && prefix != BUSY, "Error");
size_t i = 1;
oop cur = prefix;
while (i < objsFromOverflow && cur->klass_or_null() != NULL) {
i++; cur = cur->list_ptr_from_klass();
for (size_t i = 1; i < objsFromOverflow; ++i) {
oop next = cur->list_ptr_from_klass();
if (next == NULL) break;
cur = next;
}
assert(cur != NULL, "Loop postcondition");

// Reattach remaining (suffix) to overflow list
if (cur->klass_or_null() == NULL) {
oop suffix = cur->list_ptr_from_klass();
if (suffix == NULL) {
// Write back the NULL in lieu of the BUSY we wrote
// above and it is still the same value.
if (_overflow_list == BUSY) {
(void) Atomic::cmpxchg_ptr(NULL, &_overflow_list, BUSY);
}
} else {
assert(cur->klass_or_null() != (Klass*)(address)BUSY, "Error");
oop suffix = cur->list_ptr_from_klass(); // suffix will be put back on global list
assert(suffix != BUSY, "Error");
// suffix will be put back on global list
cur->set_klass_to_list_ptr(NULL); // break off suffix
// It's possible that the list is still in the empty(busy) state
// we left it in a short while ago; in that case we may be
@ -1401,8 +1404,10 @@ bool ParNewGeneration::take_from_overflow_list_work(ParScanThreadState* par_scan
// Too bad, someone else got in in between; we'll need to do a splice.
// Find the last item of suffix list
oop last = suffix;
while (last->klass_or_null() != NULL) {
last = last->list_ptr_from_klass();
while (true) {
oop next = last->list_ptr_from_klass();
if (next == NULL) break;
last = next;
}
// Atomically prepend suffix to current overflow list
observed_overflow_list = _overflow_list;
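Note (illustrative, not part of the commit): the ParNew hunks above bound the walk over the overflow list — a singly linked list threaded through the objects' klass words — to at most objsFromOverflow entries and then splice the remaining suffix back onto the global list. A small standalone C++ sketch of that traversal shape, using plain pointers and hypothetical names:

#include <cstddef>

struct Node { Node* next; };

// Take up to 'limit' nodes starting at 'prefix'; return the detached suffix
// (nullptr if the list ended before the limit was reached).
Node* take_prefix(Node* prefix, std::size_t limit, std::size_t* taken) {
  Node* cur = prefix;
  std::size_t n = 1;                       // 'prefix' itself is the first node
  for (std::size_t i = 1; i < limit; ++i) {
    Node* next = cur->next;
    if (next == nullptr) break;            // list ended before the limit
    cur = next;
    ++n;
  }
  *taken = n;
  Node* suffix = cur->next;                // may be nullptr
  cur->next = nullptr;                     // break off the suffix, as the diff does
  return suffix;                           // caller re-attaches it to the global list
}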
@ -1479,7 +1479,7 @@ void G1CollectedHeap::resize_if_necessary_after_full_collection() {
"Capacity: " SIZE_FORMAT "B occupancy: " SIZE_FORMAT "B min_desired_capacity: " SIZE_FORMAT "B (" UINTX_FORMAT " %%)",
capacity_after_gc, used_after_gc, minimum_desired_capacity, MinHeapFreeRatio);

expand(expand_bytes);
expand(expand_bytes, _workers);

// No expansion, now see if we want to shrink
} else if (capacity_after_gc > maximum_desired_capacity) {
@ -1599,7 +1599,7 @@ HeapWord* G1CollectedHeap::expand_and_allocate(size_t word_size, AllocationConte
word_size * HeapWordSize);


if (expand(expand_bytes)) {
if (expand(expand_bytes, _workers)) {
_hrm.verify_optional();
_verifier->verify_region_sets_optional();
return attempt_allocation_at_safepoint(word_size,
@ -1609,7 +1609,7 @@ HeapWord* G1CollectedHeap::expand_and_allocate(size_t word_size, AllocationConte
return NULL;
}

bool G1CollectedHeap::expand(size_t expand_bytes, double* expand_time_ms) {
bool G1CollectedHeap::expand(size_t expand_bytes, WorkGang* pretouch_workers, double* expand_time_ms) {
size_t aligned_expand_bytes = ReservedSpace::page_align_size_up(expand_bytes);
aligned_expand_bytes = align_size_up(aligned_expand_bytes,
HeapRegion::GrainBytes);
@ -1626,7 +1626,7 @@ bool G1CollectedHeap::expand(size_t expand_bytes, double* expand_time_ms) {
uint regions_to_expand = (uint)(aligned_expand_bytes / HeapRegion::GrainBytes);
assert(regions_to_expand > 0, "Must expand by at least one region");

uint expanded_by = _hrm.expand_by(regions_to_expand);
uint expanded_by = _hrm.expand_by(regions_to_expand, pretouch_workers);
if (expand_time_ms != NULL) {
*expand_time_ms = (os::elapsedTime() - expand_heap_start_time_sec) * MILLIUNITS;
}
@ -1927,7 +1927,7 @@ jint G1CollectedHeap::initialize() {
_cmThread = _cm->cmThread();

// Now expand into the initial heap size.
if (!expand(init_byte_size)) {
if (!expand(init_byte_size, _workers)) {
vm_shutdown_during_initialization("Failed to allocate initial heap.");
return JNI_ENOMEM;
}
@ -3165,7 +3165,6 @@ G1CollectedHeap::do_collection_pause_at_safepoint(double target_pause_time_ms) {

assert(_verifier->check_cset_fast_test(), "Inconsistency in the InCSetState table.");

_cm->note_start_of_gc();
// We call this after finalize_cset() to
// ensure that the CSet has been finalized.
_cm->verify_no_cset_oops();
@ -3241,7 +3240,7 @@ G1CollectedHeap::do_collection_pause_at_safepoint(double target_pause_time_ms) {
// No need for an ergo logging here,
// expansion_amount() does this when it returns a value > 0.
double expand_ms;
if (!expand(expand_bytes, &expand_ms)) {
if (!expand(expand_bytes, _workers, &expand_ms)) {
// We failed to expand the heap. Cannot do anything about it.
}
g1_policy()->phase_times()->record_expand_heap_time(expand_ms);
@ -3251,7 +3250,6 @@ G1CollectedHeap::do_collection_pause_at_safepoint(double target_pause_time_ms) {
// We redo the verification but now wrt to the new CSet which
// has just got initialized after the previous CSet was freed.
_cm->verify_no_cset_oops();
_cm->note_end_of_gc();

// This timing is only used by the ergonomics to handle our pause target.
// It is unclear why this should not include the full pause. We will
@ -557,7 +557,7 @@ public:
// Returns true if the heap was expanded by the requested amount;
// false otherwise.
// (Rounds up to a HeapRegion boundary.)
bool expand(size_t expand_bytes, double* expand_time_ms = NULL);
bool expand(size_t expand_bytes, WorkGang* pretouch_workers = NULL, double* expand_time_ms = NULL);

// Returns the PLAB statistics for a given destination.
inline G1EvacStats* alloc_buffer_stats(InCSetState dest);
@ -133,129 +133,184 @@ void G1CMBitMap::clear_range(MemRegion mr) {
}

G1CMMarkStack::G1CMMarkStack() :
_reserved_space(),
_max_chunk_capacity(0),
_base(NULL),
_capacity(0),
_saved_index((size_t)AllBits),
_chunk_capacity(0),
_out_of_memory(false),
_should_expand(false) {
set_empty();
}

bool G1CMMarkStack::resize(size_t new_capacity) {
assert(is_empty(), "Only resize when stack is empty.");
assert(new_capacity <= MarkStackSizeMax,
"Trying to resize stack to " SIZE_FORMAT " elements when the maximum is " SIZE_FORMAT, new_capacity, MarkStackSizeMax);
assert(new_capacity <= _max_chunk_capacity,
"Trying to resize stack to " SIZE_FORMAT " chunks when the maximum is " SIZE_FORMAT, new_capacity, _max_chunk_capacity);

size_t reservation_size = ReservedSpace::allocation_align_size_up(new_capacity * sizeof(oop));
OopChunk* new_base = MmapArrayAllocator<OopChunk, mtGC>::allocate_or_null(new_capacity);

ReservedSpace rs(reservation_size);
if (!rs.is_reserved()) {
log_warning(gc)("Failed to reserve memory for new overflow mark stack with " SIZE_FORMAT " elements and size " SIZE_FORMAT "B.", new_capacity, reservation_size);
if (new_base == NULL) {
log_warning(gc)("Failed to reserve memory for new overflow mark stack with " SIZE_FORMAT " chunks and size " SIZE_FORMAT "B.", new_capacity, new_capacity * sizeof(OopChunk));
return false;
}

VirtualSpace vs;

if (!vs.initialize(rs, rs.size())) {
rs.release();
log_warning(gc)("Failed to commit memory for new overflow mark stack of size " SIZE_FORMAT "B.", rs.size());
return false;
}

assert(vs.committed_size() == rs.size(), "Failed to commit all of the mark stack.");

// Release old mapping.
_reserved_space.release();
if (_base != NULL) {
MmapArrayAllocator<OopChunk, mtGC>::free(_base, _chunk_capacity);
}

// Save new mapping for future unmapping.
_reserved_space = rs;

MemTracker::record_virtual_memory_type((address)_reserved_space.base(), mtGC);

_base = (oop*) vs.low();
_capacity = new_capacity;
_base = new_base;
_chunk_capacity = new_capacity;
set_empty();
_should_expand = false;

return true;
}

bool G1CMMarkStack::allocate(size_t capacity) {
return resize(capacity);
size_t G1CMMarkStack::capacity_alignment() {
return (size_t)lcm(os::vm_allocation_granularity(), sizeof(OopChunk)) / sizeof(void*);
}

bool G1CMMarkStack::initialize(size_t initial_capacity, size_t max_capacity) {
guarantee(_max_chunk_capacity == 0, "G1CMMarkStack already initialized.");

size_t const OopChunkSizeInVoidStar = sizeof(OopChunk) / sizeof(void*);

_max_chunk_capacity = (size_t)align_size_up(max_capacity, capacity_alignment()) / OopChunkSizeInVoidStar;
size_t initial_chunk_capacity = (size_t)align_size_up(initial_capacity, capacity_alignment()) / OopChunkSizeInVoidStar;

guarantee(initial_chunk_capacity <= _max_chunk_capacity,
"Maximum chunk capacity " SIZE_FORMAT " smaller than initial capacity " SIZE_FORMAT,
_max_chunk_capacity,
initial_chunk_capacity);

log_debug(gc)("Initialize mark stack with " SIZE_FORMAT " chunks, maximum " SIZE_FORMAT,
initial_chunk_capacity, _max_chunk_capacity);

return resize(initial_chunk_capacity);
}

void G1CMMarkStack::expand() {
// Clear expansion flag
_should_expand = false;

if (_capacity == MarkStackSizeMax) {
log_debug(gc)("Can not expand overflow mark stack further, already at maximum capacity of " SIZE_FORMAT " elements.", _capacity);
if (_chunk_capacity == _max_chunk_capacity) {
log_debug(gc)("Can not expand overflow mark stack further, already at maximum capacity of " SIZE_FORMAT " chunks.", _chunk_capacity);
return;
}
size_t old_capacity = _capacity;
size_t old_capacity = _chunk_capacity;
// Double capacity if possible
size_t new_capacity = MIN2(old_capacity * 2, MarkStackSizeMax);
size_t new_capacity = MIN2(old_capacity * 2, _max_chunk_capacity);

if (resize(new_capacity)) {
log_debug(gc)("Expanded marking stack capacity from " SIZE_FORMAT " to " SIZE_FORMAT " elements",
log_debug(gc)("Expanded mark stack capacity from " SIZE_FORMAT " to " SIZE_FORMAT " chunks",
old_capacity, new_capacity);
} else {
log_warning(gc)("Failed to expand marking stack capacity from " SIZE_FORMAT " to " SIZE_FORMAT " elements",
log_warning(gc)("Failed to expand mark stack capacity from " SIZE_FORMAT " to " SIZE_FORMAT " chunks",
old_capacity, new_capacity);
}
}

G1CMMarkStack::~G1CMMarkStack() {
if (_base != NULL) {
_base = NULL;
_reserved_space.release();
MmapArrayAllocator<OopChunk, mtGC>::free(_base, _chunk_capacity);
}
}

void G1CMMarkStack::par_push_arr(oop* buffer, size_t n) {
MutexLockerEx x(ParGCRareEvent_lock, Mutex::_no_safepoint_check_flag);
size_t start = _index;
size_t next_index = start + n;
if (next_index > _capacity) {
_overflow = true;
return;
}
// Otherwise.
_index = next_index;
for (size_t i = 0; i < n; i++) {
size_t ind = start + i;
assert(ind < _capacity, "By overflow test above.");
_base[ind] = buffer[i];
}
void G1CMMarkStack::add_chunk_to_list(OopChunk* volatile* list, OopChunk* elem) {
elem->next = *list;
*list = elem;
}

bool G1CMMarkStack::par_pop_arr(oop* buffer, size_t max, size_t* n) {
MutexLockerEx x(ParGCRareEvent_lock, Mutex::_no_safepoint_check_flag);
size_t index = _index;
if (index == 0) {
*n = 0;
void G1CMMarkStack::add_chunk_to_chunk_list(OopChunk* elem) {
MutexLockerEx x(MarkStackChunkList_lock, Mutex::_no_safepoint_check_flag);
add_chunk_to_list(&_chunk_list, elem);
_chunks_in_chunk_list++;
}

void G1CMMarkStack::add_chunk_to_free_list(OopChunk* elem) {
MutexLockerEx x(MarkStackFreeList_lock, Mutex::_no_safepoint_check_flag);
add_chunk_to_list(&_free_list, elem);
}

G1CMMarkStack::OopChunk* G1CMMarkStack::remove_chunk_from_list(OopChunk* volatile* list) {
OopChunk* result = *list;
if (result != NULL) {
*list = (*list)->next;
}
return result;
}

G1CMMarkStack::OopChunk* G1CMMarkStack::remove_chunk_from_chunk_list() {
MutexLockerEx x(MarkStackChunkList_lock, Mutex::_no_safepoint_check_flag);
OopChunk* result = remove_chunk_from_list(&_chunk_list);
if (result != NULL) {
_chunks_in_chunk_list--;
}
return result;
}

G1CMMarkStack::OopChunk* G1CMMarkStack::remove_chunk_from_free_list() {
MutexLockerEx x(MarkStackFreeList_lock, Mutex::_no_safepoint_check_flag);
return remove_chunk_from_list(&_free_list);
}

G1CMMarkStack::OopChunk* G1CMMarkStack::allocate_new_chunk() {
// This dirty read of _hwm is okay because we only ever increase the _hwm in parallel code.
// Further this limits _hwm to a value of _chunk_capacity + #threads, avoiding
// wraparound of _hwm.
if (_hwm >= _chunk_capacity) {
return NULL;
}

size_t cur_idx = Atomic::add(1, &_hwm) - 1;
if (cur_idx >= _chunk_capacity) {
return NULL;
}

OopChunk* result = ::new (&_base[cur_idx]) OopChunk;
result->next = NULL;
return result;
}

bool G1CMMarkStack::par_push_chunk(oop* ptr_arr) {
// Get a new chunk.
OopChunk* new_chunk = remove_chunk_from_free_list();

if (new_chunk == NULL) {
// Did not get a chunk from the free list. Allocate from backing memory.
new_chunk = allocate_new_chunk();
}

if (new_chunk == NULL) {
_out_of_memory = true;
return false;
} else {
size_t k = MIN2(max, index);
size_t new_ind = index - k;
for (size_t j = 0; j < k; j++) {
buffer[j] = _base[new_ind + j];
}
_index = new_ind;
*n = k;
return true;
}

Copy::conjoint_memory_atomic(ptr_arr, new_chunk->data, OopsPerChunk * sizeof(oop));

add_chunk_to_chunk_list(new_chunk);

return true;
}

void G1CMMarkStack::note_start_of_gc() {
assert(_saved_index == (size_t)AllBits, "note_start_of_gc()/end_of_gc() calls bracketed incorrectly");
_saved_index = _index;
bool G1CMMarkStack::par_pop_chunk(oop* ptr_arr) {
OopChunk* cur = remove_chunk_from_chunk_list();

if (cur == NULL) {
return false;
}

Copy::conjoint_memory_atomic(cur->data, ptr_arr, OopsPerChunk * sizeof(oop));

add_chunk_to_free_list(cur);
return true;
}

void G1CMMarkStack::note_end_of_gc() {
guarantee(!stack_modified(), "Saved index " SIZE_FORMAT " must be the same as " SIZE_FORMAT, _saved_index, _index);

_saved_index = (size_t)AllBits;
void G1CMMarkStack::set_empty() {
_chunks_in_chunk_list = 0;
_hwm = 0;
clear_out_of_memory();
_chunk_list = NULL;
_free_list = NULL;
}

G1CMRootRegions::G1CMRootRegions() :
@ -483,9 +538,8 @@ G1ConcurrentMark::G1ConcurrentMark(G1CollectedHeap* g1h, G1RegionToSpaceMapper*
}
}

if (!_global_mark_stack.allocate(MarkStackSize)) {
if (!_global_mark_stack.initialize(MarkStackSize, MarkStackSizeMax)) {
vm_exit_during_initialization("Failed to allocate initial concurrent mark overflow mark stack.");
return;
}

_tasks = NEW_C_HEAP_ARRAY(G1CMTask*, _max_worker_id, mtGC);
@ -1695,10 +1749,10 @@ void G1ConcurrentMark::weakRefsWork(bool clear_all_soft_refs) {
// oop closures will set the has_overflown flag if we overflow the
// global marking stack.

assert(_global_mark_stack.overflow() || _global_mark_stack.is_empty(),
"mark stack should be empty (unless it overflowed)");
assert(_global_mark_stack.is_out_of_memory() || _global_mark_stack.is_empty(),
"Mark stack should be empty (unless it is out of memory)");

if (_global_mark_stack.overflow()) {
if (_global_mark_stack.is_out_of_memory()) {
// This should have been done already when we tried to push an
// entry on to the global mark stack. But let's do it again.
set_has_overflown();
@ -2343,49 +2397,54 @@ void G1CMTask::decrease_limits() {
}

void G1CMTask::move_entries_to_global_stack() {
// local array where we'll store the entries that will be popped
// from the local queue
oop buffer[global_stack_transfer_size];
// Local array where we'll store the entries that will be popped
// from the local queue.
oop buffer[G1CMMarkStack::OopsPerChunk];

int n = 0;
size_t n = 0;
oop obj;
while (n < global_stack_transfer_size && _task_queue->pop_local(obj)) {
while (n < G1CMMarkStack::OopsPerChunk && _task_queue->pop_local(obj)) {
buffer[n] = obj;
++n;
}
if (n < G1CMMarkStack::OopsPerChunk) {
buffer[n] = NULL;
}

if (n > 0) {
// we popped at least one entry from the local queue

if (!_cm->mark_stack_push(buffer, n)) {
if (!_cm->mark_stack_push(buffer)) {
set_has_aborted();
}
}

// this operation was quite expensive, so decrease the limits
// This operation was quite expensive, so decrease the limits.
decrease_limits();
}

void G1CMTask::get_entries_from_global_stack() {
// local array where we'll store the entries that will be popped
bool G1CMTask::get_entries_from_global_stack() {
// Local array where we'll store the entries that will be popped
// from the global stack.
oop buffer[global_stack_transfer_size];
size_t n;
_cm->mark_stack_pop(buffer, global_stack_transfer_size, &n);
assert(n <= global_stack_transfer_size,
"we should not pop more than the given limit");
if (n > 0) {
// yes, we did actually pop at least one entry
for (size_t i = 0; i < n; ++i) {
bool success = _task_queue->push(buffer[i]);
// We only call this when the local queue is empty or under a
// given target limit. So, we do not expect this push to fail.
assert(success, "invariant");
}
oop buffer[G1CMMarkStack::OopsPerChunk];

if (!_cm->mark_stack_pop(buffer)) {
return false;
}

// this operation was quite expensive, so decrease the limits
// We did actually pop at least one entry.
for (size_t i = 0; i < G1CMMarkStack::OopsPerChunk; ++i) {
oop elem = buffer[i];
if (elem == NULL) {
break;
}
bool success = _task_queue->push(elem);
// We only call this when the local queue is empty or under a
// given target limit. So, we do not expect this push to fail.
assert(success, "invariant");
}

// This operation was quite expensive, so decrease the limits
decrease_limits();
return true;
}

void G1CMTask::drain_local_queue(bool partially) {
@ -2429,20 +2488,21 @@ void G1CMTask::drain_global_stack(bool partially) {

// Decide what the target size is, depending whether we're going to
// drain it partially (so that other tasks can steal if they run out
// of things to do) or totally (at the very end). Notice that,
// because we move entries from the global stack in chunks or
// because another task might be doing the same, we might in fact
// drop below the target. But, this is not a problem.
size_t target_size;
// of things to do) or totally (at the very end).
// Notice that when draining the global mark stack partially, due to the racyness
// of the mark stack size update we might in fact drop below the target. But,
// this is not a problem.
// In case of total draining, we simply process until the global mark stack is
// totally empty, disregarding the size counter.
if (partially) {
target_size = _cm->partial_mark_stack_size_target();
} else {
target_size = 0;
}

if (_cm->mark_stack_size() > target_size) {
size_t const target_size = _cm->partial_mark_stack_size_target();
while (!has_aborted() && _cm->mark_stack_size() > target_size) {
get_entries_from_global_stack();
if (get_entries_from_global_stack()) {
drain_local_queue(partially);
}
}
} else {
while (!has_aborted() && get_entries_from_global_stack()) {
drain_local_queue(partially);
}
}
@ -149,42 +149,98 @@ class G1CMBitMap : public G1CMBitMapRO {
//
// Stores oops in a huge buffer in virtual memory that is always fully committed.
// Resizing may only happen during a STW pause when the stack is empty.
//
// Memory is allocated on a "chunk" basis, i.e. a set of oops. For this, the mark
// stack memory is split into evenly sized chunks of oops. Users can only
// add or remove entries on that basis.
// Chunks are filled in increasing address order. Not completely filled chunks
// have a NULL element as a terminating element.
//
// Every chunk has a header containing a single pointer element used for memory
// management. This wastes some space, but is negligible (< .1% with current sizing).
//
// Memory management is done using a mix of tracking a high water-mark indicating
// that all chunks at a lower address are valid chunks, and a singly linked free
// list connecting all empty chunks.
class G1CMMarkStack VALUE_OBJ_CLASS_SPEC {
ReservedSpace _reserved_space; // Space currently reserved for the mark stack.
public:
// Number of oops that can fit in a single chunk.
static const size_t OopsPerChunk = 1024 - 1 /* One reference for the next pointer */;
private:
struct OopChunk {
OopChunk* next;
oop data[OopsPerChunk];
};

oop* _base; // Bottom address of allocated memory area.
size_t _capacity; // Maximum number of elements.
size_t _index; // One more than last occupied index.
size_t _max_chunk_capacity; // Maximum number of OopChunk elements on the stack.

size_t _saved_index; // Value of _index saved at start of GC to detect mark stack modifications during that time.
OopChunk* _base; // Bottom address of allocated memory area.
size_t _chunk_capacity; // Current maximum number of OopChunk elements.

char _pad0[DEFAULT_CACHE_LINE_SIZE];
OopChunk* volatile _free_list; // Linked list of free chunks that can be allocated by users.
char _pad1[DEFAULT_CACHE_LINE_SIZE - sizeof(OopChunk*)];
OopChunk* volatile _chunk_list; // List of chunks currently containing data.
volatile size_t _chunks_in_chunk_list;
char _pad2[DEFAULT_CACHE_LINE_SIZE - sizeof(OopChunk*) - sizeof(size_t)];

volatile size_t _hwm; // High water mark within the reserved space.
char _pad4[DEFAULT_CACHE_LINE_SIZE - sizeof(size_t)];

// Allocate a new chunk from the reserved memory, using the high water mark. Returns
// NULL if out of memory.
OopChunk* allocate_new_chunk();

volatile bool _out_of_memory;

// Atomically add the given chunk to the list.
void add_chunk_to_list(OopChunk* volatile* list, OopChunk* elem);
// Atomically remove and return a chunk from the given list. Returns NULL if the
// list is empty.
OopChunk* remove_chunk_from_list(OopChunk* volatile* list);

void add_chunk_to_chunk_list(OopChunk* elem);
void add_chunk_to_free_list(OopChunk* elem);

OopChunk* remove_chunk_from_chunk_list();
OopChunk* remove_chunk_from_free_list();

bool _overflow;
bool _should_expand;

// Resizes the mark stack to the given new capacity. Releases any previous
// memory if successful.
bool resize(size_t new_capacity);

bool stack_modified() const { return _index != _saved_index; }
public:
G1CMMarkStack();
~G1CMMarkStack();

bool allocate(size_t capacity);
// Alignment and minimum capacity of this mark stack in number of oops.
static size_t capacity_alignment();

// Pushes the first "n" elements of the given buffer on the stack.
void par_push_arr(oop* buffer, size_t n);
// Allocate and initialize the mark stack with the given number of oops.
bool initialize(size_t initial_capacity, size_t max_capacity);

// Moves up to max elements from the stack into the given buffer. Returns
// the number of elements pushed, and false if the array has been empty.
// Returns true if the buffer contains at least one element.
bool par_pop_arr(oop* buffer, size_t max, size_t* n);
// Pushes the given buffer containing at most OopsPerChunk elements on the mark
// stack. If less than OopsPerChunk elements are to be pushed, the array must
// be terminated with a NULL.
// Returns whether the buffer contents were successfully pushed to the global mark
// stack.
bool par_push_chunk(oop* buffer);

bool is_empty() const { return _index == 0; }
size_t capacity() const { return _capacity; }
// Pops a chunk from this mark stack, copying them into the given buffer. This
// chunk may contain up to OopsPerChunk elements. If there are less, the last
// element in the array is a NULL pointer.
bool par_pop_chunk(oop* buffer);

bool overflow() const { return _overflow; }
void clear_overflow() { _overflow = false; }
// Return whether the chunk list is empty. Racy due to unsynchronized access to
// _chunk_list.
bool is_empty() const { return _chunk_list == NULL; }

size_t capacity() const { return _chunk_capacity; }

bool is_out_of_memory() const { return _out_of_memory; }
void clear_out_of_memory() { _out_of_memory = false; }

bool should_expand() const { return _should_expand; }
void set_should_expand(bool value) { _should_expand = value; }
@ -192,20 +248,15 @@ class G1CMMarkStack VALUE_OBJ_CLASS_SPEC {
// Expand the stack, typically in response to an overflow condition
void expand();

size_t size() const { return _index; }
// Return the approximate number of oops on this mark stack. Racy due to
// unsynchronized access to _chunks_in_chunk_list.
size_t size() const { return _chunks_in_chunk_list * OopsPerChunk; }

void set_empty() { _index = 0; clear_overflow(); }
void set_empty();

// Record the current index.
void note_start_of_gc();

// Make sure that we have not added any entries to the stack during GC.
void note_end_of_gc();

// Apply fn to each oop in the mark stack, up to the bound recorded
// via one of the above "note" functions. The mark stack must not
// Apply Fn to every oop on the mark stack. The mark stack must not
// be modified while iterating.
template<typename Fn> void iterate(Fn fn);
template<typename Fn> void iterate(Fn fn) const PRODUCT_RETURN;
};
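
Editor's illustration (not HotSpot code): the comment block above describes chunk allocation as a combination of a high-water mark over a pre-reserved area and a singly linked free list of recycled chunks. A simplified, standalone C++ sketch of that scheme using std::atomic; names (Chunk, ChunkPool) and sizes are illustrative, and ABA handling is deliberately omitted for brevity:

    #include <atomic>
    #include <cstddef>

    struct Chunk {
      Chunk* next;
      void*  data[1023];
    };

    class ChunkPool {
      Chunk*              _base;       // start of a pre-reserved array of chunks
      size_t              _capacity;   // number of chunks reserved
      std::atomic<size_t> _hwm;        // high-water mark: chunks below it have been handed out
      std::atomic<Chunk*> _free_list;  // recycled chunks

     public:
      ChunkPool(Chunk* base, size_t capacity)
        : _base(base), _capacity(capacity), _hwm(0), _free_list(NULL) {}

      Chunk* allocate() {
        // Prefer a recycled chunk from the free list (lock-free pop).
        Chunk* c = _free_list.load();
        while (c != NULL && !_free_list.compare_exchange_weak(c, c->next)) { }
        if (c != NULL) return c;
        // Otherwise bump the high-water mark; NULL signals "out of memory".
        size_t idx = _hwm.fetch_add(1);
        return (idx < _capacity) ? &_base[idx] : NULL;
      }

      void release(Chunk* c) {             // lock-free push onto the free list
        Chunk* head = _free_list.load();
        do { c->next = head; } while (!_free_list.compare_exchange_weak(head, c));
      }
    };

This mirrors the division of labor between allocate_new_chunk() (high-water mark) and the add/remove_chunk_from_free_list() helpers declared above.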

// Root Regions are regions that are not empty at the beginning of a
@ -278,7 +329,6 @@ class G1ConcurrentMark: public CHeapObj<mtGC> {
friend class G1CMDrainMarkingStackClosure;
friend class G1CMBitMapClosure;
friend class G1CMConcurrentMarkingTask;
friend class G1CMMarkStack;
friend class G1CMRemarkTask;
friend class G1CMTask;

@ -479,22 +529,20 @@ protected:
public:
// Manipulation of the global mark stack.
// The push and pop operations are used by tasks for transfers
// between task-local queues and the global mark stack, and use
// locking for concurrency safety.
bool mark_stack_push(oop* arr, size_t n) {
_global_mark_stack.par_push_arr(arr, n);
if (_global_mark_stack.overflow()) {
// between task-local queues and the global mark stack.
bool mark_stack_push(oop* arr) {
if (!_global_mark_stack.par_push_chunk(arr)) {
set_has_overflown();
return false;
}
return true;
}
void mark_stack_pop(oop* arr, size_t max, size_t* n) {
_global_mark_stack.par_pop_arr(arr, max, n);
bool mark_stack_pop(oop* arr) {
return _global_mark_stack.par_pop_chunk(arr);
}
size_t mark_stack_size() { return _global_mark_stack.size(); }
size_t partial_mark_stack_size_target() { return _global_mark_stack.capacity()/3; }
bool mark_stack_overflow() { return _global_mark_stack.overflow(); }
bool mark_stack_overflow() { return _global_mark_stack.is_out_of_memory(); }
bool mark_stack_empty() { return _global_mark_stack.is_empty(); }

G1CMRootRegions* root_regions() { return &_root_regions; }
@ -599,16 +647,6 @@ public:
// read-only, so use this carefully!
void clearRangePrevBitmap(MemRegion mr);

// Notify data structures that a GC has started.
void note_start_of_gc() {
_global_mark_stack.note_start_of_gc();
}

// Notify data structures that a GC is finished.
void note_end_of_gc() {
_global_mark_stack.note_end_of_gc();
}

// Verify that there are no CSet oops on the stacks (taskqueues /
// global mark stack) and fingers (global / per-task).
// If marking is not in progress, it's a no-op.
@ -670,10 +708,7 @@ private:
// references reaches this limit
refs_reached_period = 384,
// Initial value for the hash seed, used in the work stealing code
init_hash_seed = 17,
// How many entries will be transferred between global stack and
// local queues at once.
global_stack_transfer_size = 1024
init_hash_seed = 17
};

uint _worker_id;

@ -858,9 +893,10 @@ public:
// It pushes an object on the local queue.
inline void push(oop obj);

// These two move entries to/from the global stack.
// Move entries to the global stack.
void move_entries_to_global_stack();
void get_entries_from_global_stack();
// Move entries from the global stack, return true if we were successful to do so.
bool get_entries_from_global_stack();

// It pops and scans objects from the local queue. If partially is
// true, then it stops when the queue size is of a given limit. If

@ -89,14 +89,28 @@ inline bool G1CMBitMap::parMark(HeapWord* addr) {

#undef check_mark

#ifndef PRODUCT
template<typename Fn>
inline void G1CMMarkStack::iterate(Fn fn) {
inline void G1CMMarkStack::iterate(Fn fn) const {
assert_at_safepoint(true);
assert(!stack_modified(), "Saved index " SIZE_FORMAT " must be the same as " SIZE_FORMAT, _saved_index, _index);
for (size_t i = 0; i < _index; ++i) {
fn(_base[i]);

size_t num_chunks = 0;

OopChunk* cur = _chunk_list;
while (cur != NULL) {
guarantee(num_chunks <= _chunks_in_chunk_list, "Found " SIZE_FORMAT " oop chunks which is more than there should be", num_chunks);

for (size_t i = 0; i < OopsPerChunk; ++i) {
if (cur->data[i] == NULL) {
break;
}
fn(cur->data[i]);
}
cur = cur->next;
num_chunks++;
}
}
#endif

// It scans an object and visits its children.
inline void G1CMTask::scan_object(oop obj) { process_grey_object<true>(obj); }

@ -34,7 +34,6 @@ class G1RemSet;
class G1ConcurrentMark;
class DirtyCardToOopClosure;
class G1CMBitMap;
class G1CMMarkStack;
class G1ParScanThreadState;
class G1CMTask;
class ReferenceProcessor;

@ -24,8 +24,10 @@

#include "precompiled.hpp"
#include "gc/g1/g1PageBasedVirtualSpace.hpp"
#include "gc/shared/workgroup.hpp"
#include "oops/markOop.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/os.inline.hpp"
#include "services/memTracker.hpp"
#include "utilities/bitMap.inline.hpp"
@ -177,7 +179,7 @@ void G1PageBasedVirtualSpace::pretouch_internal(size_t start_page, size_t end_pa
guarantee(start_page < end_page,
"Given start page " SIZE_FORMAT " is larger or equal to end page " SIZE_FORMAT, start_page, end_page);

os::pretouch_memory(page_start(start_page), bounded_end_addr(end_page));
os::pretouch_memory(page_start(start_page), bounded_end_addr(end_page), _page_size);
}

bool G1PageBasedVirtualSpace::commit(size_t start_page, size_t size_in_pages) {
@ -198,9 +200,6 @@ bool G1PageBasedVirtualSpace::commit(size_t start_page, size_t size_in_pages) {
}
_committed.set_range(start_page, end_page);

if (AlwaysPreTouch) {
pretouch_internal(start_page, end_page);
}
return zero_filled;
}

@ -227,6 +226,53 @@ void G1PageBasedVirtualSpace::uncommit(size_t start_page, size_t size_in_pages)
_committed.clear_range(start_page, end_page);
}

class G1PretouchTask : public AbstractGangTask {
private:
char* volatile _cur_addr;
char* const _start_addr;
char* const _end_addr;
size_t const _page_size;
public:
G1PretouchTask(char* start_address, char* end_address, size_t page_size) :
AbstractGangTask("G1 PreTouch",
Universe::is_fully_initialized() ? GCId::current_raw() :
// During VM initialization there is
// no GC cycle that this task can be
// associated with.
GCId::undefined()),
_cur_addr(start_address),
_start_addr(start_address),
_end_addr(end_address),
_page_size(page_size) {
}

virtual void work(uint worker_id) {
size_t const actual_chunk_size = MAX2(chunk_size(), _page_size);
while (true) {
char* touch_addr = (char*)Atomic::add_ptr((intptr_t)actual_chunk_size, (volatile void*) &_cur_addr) - actual_chunk_size;
if (touch_addr < _start_addr || touch_addr >= _end_addr) {
break;
}
char* end_addr = touch_addr + MIN2(actual_chunk_size, pointer_delta(_end_addr, touch_addr, sizeof(char)));
os::pretouch_memory(touch_addr, end_addr, _page_size);
}
}

static size_t chunk_size() { return PreTouchParallelChunkSize; }
};

void G1PageBasedVirtualSpace::pretouch(size_t start_page, size_t size_in_pages, WorkGang* pretouch_gang) {
guarantee(pretouch_gang != NULL, "No pretouch gang specified.");

size_t num_chunks = MAX2((size_t)1, size_in_pages * _page_size / MAX2(G1PretouchTask::chunk_size(), _page_size));

uint num_workers = MIN2((uint)num_chunks, pretouch_gang->active_workers());
G1PretouchTask cl(page_start(start_page), bounded_end_addr(start_page + size_in_pages), _page_size);
log_debug(gc, heap)("Running %s with %u workers for " SIZE_FORMAT " work units pre-touching " SIZE_FORMAT "B.",
cl.name(), num_workers, num_chunks, size_in_pages * _page_size);
pretouch_gang->run_task(&cl, num_workers);
}
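
Editor's illustration (not HotSpot code): G1PretouchTask above lets each gang worker claim the next chunk of the address range by atomically advancing a shared cursor. A standalone C++ sketch of that claiming pattern with std::atomic; the chunk size, the cursor type, and the per-page store are illustrative assumptions:

    #include <atomic>
    #include <cstddef>

    static void pretouch_parallel_worker(std::atomic<char*>& cursor,
                                         char* start, char* end,
                                         size_t chunk_size, size_t page_size) {
      for (;;) {
        // Claim the next chunk; fetch_add returns the previous cursor, i.e. our chunk start.
        char* touch_start = cursor.fetch_add(chunk_size);
        if (touch_start < start || touch_start >= end) {
          break;                               // nothing left to claim
        }
        char* touch_end = touch_start + chunk_size;
        if (touch_end > end) touch_end = end;  // clamp the last chunk
        for (char* p = touch_start; p < touch_end; p += page_size) {
          *const_cast<volatile char*>(p) = 0;  // touch one byte per page
        }
      }
    }

Sizing the number of workers from the number of chunks, as the pretouch() function above does, keeps idle workers from being scheduled when the range is small.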
|
||||
|
||||
bool G1PageBasedVirtualSpace::contains(const void* p) const {
|
||||
return _low_boundary <= (const char*) p && (const char*) p < _high_boundary;
|
||||
}
|
||||
|
@ -30,6 +30,8 @@
|
||||
#include "memory/virtualspace.hpp"
|
||||
#include "utilities/bitMap.hpp"
|
||||
|
||||
class WorkGang;
|
||||
|
||||
// Virtual space management helper for a virtual space with an OS page allocation
|
||||
// granularity.
|
||||
// (De-)Allocation requests are always OS page aligned by passing a page index
|
||||
@ -117,6 +119,8 @@ class G1PageBasedVirtualSpace VALUE_OBJ_CLASS_SPEC {
|
||||
// Uncommit the given area of pages starting at start being size_in_pages large.
|
||||
void uncommit(size_t start_page, size_t size_in_pages);
|
||||
|
||||
void pretouch(size_t start_page, size_t size_in_pages, WorkGang* pretouch_gang = NULL);
|
||||
|
||||
// Initialize the given reserved space with the given base address and the size
|
||||
// actually used.
|
||||
// Prefer to commit in page_size chunks.
|
||||
|
@ -66,8 +66,12 @@ class G1RegionsLargerThanCommitSizeMapper : public G1RegionToSpaceMapper {
|
||||
guarantee(alloc_granularity >= page_size, "allocation granularity smaller than commit granularity");
|
||||
}
|
||||
|
||||
virtual void commit_regions(uint start_idx, size_t num_regions) {
|
||||
bool zero_filled = _storage.commit((size_t)start_idx * _pages_per_region, num_regions * _pages_per_region);
|
||||
virtual void commit_regions(uint start_idx, size_t num_regions, WorkGang* pretouch_gang) {
|
||||
size_t const start_page = (size_t)start_idx * _pages_per_region;
|
||||
bool zero_filled = _storage.commit(start_page, num_regions * _pages_per_region);
|
||||
if (AlwaysPreTouch) {
|
||||
_storage.pretouch(start_page, num_regions * _pages_per_region, pretouch_gang);
|
||||
}
|
||||
_commit_map.set_range(start_idx, start_idx + num_regions);
|
||||
fire_on_commit(start_idx, num_regions, zero_filled);
|
||||
}
|
||||
@ -110,19 +114,38 @@ class G1RegionsSmallerThanCommitSizeMapper : public G1RegionToSpaceMapper {
|
||||
_refcounts.initialize((HeapWord*)rs.base(), (HeapWord*)(rs.base() + align_size_up(rs.size(), page_size)), page_size);
|
||||
}
|
||||
|
||||
virtual void commit_regions(uint start_idx, size_t num_regions) {
|
||||
virtual void commit_regions(uint start_idx, size_t num_regions, WorkGang* pretouch_gang) {
|
||||
size_t const NoPage = ~(size_t)0;
|
||||
|
||||
size_t first_committed = NoPage;
|
||||
size_t num_committed = 0;
|
||||
|
||||
bool all_zero_filled = true;
|
||||
|
||||
for (uint i = start_idx; i < start_idx + num_regions; i++) {
|
||||
assert(!_commit_map.at(i), "Trying to commit storage at region %u that is already committed", i);
|
||||
size_t idx = region_idx_to_page_idx(i);
|
||||
uint old_refcount = _refcounts.get_by_index(idx);
|
||||
|
||||
bool zero_filled = false;
|
||||
if (old_refcount == 0) {
|
||||
if (first_committed == NoPage) {
|
||||
first_committed = idx;
|
||||
num_committed = 1;
|
||||
} else {
|
||||
num_committed++;
|
||||
}
|
||||
zero_filled = _storage.commit(idx, 1);
|
||||
}
|
||||
all_zero_filled &= zero_filled;
|
||||
|
||||
_refcounts.set_by_index(idx, old_refcount + 1);
|
||||
_commit_map.set_bit(i);
|
||||
fire_on_commit(i, 1, zero_filled);
|
||||
}
|
||||
if (AlwaysPreTouch && num_committed > 0) {
|
||||
_storage.pretouch(first_committed, num_committed, pretouch_gang);
|
||||
}
|
||||
fire_on_commit(start_idx, num_regions, all_zero_filled);
|
||||
}
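
Editor's illustration (not HotSpot code): in the mapper above, several regions can share one commit page, so the page is committed only when its refcount goes from 0 to 1 (and would be uncommitted on the symmetric 1 to 0 transition). A standalone C++ sketch of that idea; PageBackedStorage and commit_region are hypothetical names:

    #include <cstddef>
    #include <map>

    struct PageBackedStorage {
      virtual bool commit(size_t page_idx) = 0;   // returns true if the page is zero-filled
      virtual ~PageBackedStorage() {}
    };

    static bool commit_region(std::map<size_t, unsigned>& refcounts,
                              PageBackedStorage& storage,
                              size_t page_idx) {
      unsigned& rc = refcounts[page_idx];
      bool zero_filled = false;
      if (rc == 0) {
        zero_filled = storage.commit(page_idx);   // only the first user commits the page
      }
      rc++;
      return zero_filled;
    }

The diff additionally remembers the first newly committed page and the count of committed pages so that pretouching can later be done once, in bulk, by the pretouch gang.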
|
||||
|
||||
virtual void uncommit_regions(uint start_idx, size_t num_regions) {
|
||||
|
@ -29,6 +29,8 @@
|
||||
#include "memory/allocation.hpp"
|
||||
#include "utilities/debug.hpp"
|
||||
|
||||
class WorkGang;
|
||||
|
||||
class G1MappingChangedListener VALUE_OBJ_CLASS_SPEC {
|
||||
public:
|
||||
// Fired after commit of the memory, i.e. the memory this listener is registered
|
||||
@ -68,7 +70,7 @@ class G1RegionToSpaceMapper : public CHeapObj<mtGC> {
|
||||
return _commit_map.at(idx);
|
||||
}
|
||||
|
||||
virtual void commit_regions(uint start_idx, size_t num_regions = 1) = 0;
|
||||
virtual void commit_regions(uint start_idx, size_t num_regions = 1, WorkGang* pretouch_workers = NULL) = 0;
|
||||
virtual void uncommit_regions(uint start_idx, size_t num_regions = 1) = 0;
|
||||
|
||||
// Creates an appropriate G1RegionToSpaceMapper for the given parameters.
|
||||
|
@ -352,35 +352,6 @@ void HeapRegion::note_self_forwarding_removal_end(bool during_initial_mark,
|
||||
_prev_marked_bytes = marked_bytes;
|
||||
}
|
||||
|
||||
HeapWord*
|
||||
HeapRegion::object_iterate_mem_careful(MemRegion mr,
|
||||
ObjectClosure* cl) {
|
||||
G1CollectedHeap* g1h = G1CollectedHeap::heap();
|
||||
// We used to use "block_start_careful" here. But we're actually happy
|
||||
// to update the BOT while we do this...
|
||||
HeapWord* cur = block_start(mr.start());
|
||||
mr = mr.intersection(used_region());
|
||||
if (mr.is_empty()) return NULL;
|
||||
// Otherwise, find the obj that extends onto mr.start().
|
||||
|
||||
assert(cur <= mr.start()
|
||||
&& (oop(cur)->klass_or_null() == NULL ||
|
||||
cur + oop(cur)->size() > mr.start()),
|
||||
"postcondition of block_start");
|
||||
oop obj;
|
||||
while (cur < mr.end()) {
|
||||
obj = oop(cur);
|
||||
if (obj->klass_or_null() == NULL) {
|
||||
// Ran into an unparseable point.
|
||||
return cur;
|
||||
} else if (!g1h->is_obj_dead(obj)) {
|
||||
cl->do_object(obj);
|
||||
}
|
||||
cur += block_size(cur);
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
HeapWord*
|
||||
HeapRegion::
|
||||
oops_on_card_seq_iterate_careful(MemRegion mr,
|
||||
|
@ -653,17 +653,6 @@ class HeapRegion: public G1ContiguousSpace {
|
||||
}
|
||||
}
|
||||
|
||||
// Requires that "mr" be entirely within the region.
|
||||
// Apply "cl->do_object" to all objects that intersect with "mr".
|
||||
// If the iteration encounters an unparseable portion of the region,
|
||||
// or if "cl->abort()" is true after a closure application,
|
||||
// terminate the iteration and return the address of the start of the
|
||||
// subregion that isn't done. (The two can be distinguished by querying
|
||||
// "cl->abort()".) Return of "NULL" indicates that the iteration
|
||||
// completed.
|
||||
HeapWord*
|
||||
object_iterate_mem_careful(MemRegion mr, ObjectClosure* cl);
|
||||
|
||||
// filter_young: if true and the region is a young region then we
|
||||
// skip the iteration.
|
||||
// card_ptr: if not NULL, and we decide that the card is not young
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2001, 2015, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2001, 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -72,22 +72,22 @@ HeapRegion* HeapRegionManager::new_heap_region(uint hrm_index) {
|
||||
return g1h->new_heap_region(hrm_index, mr);
|
||||
}
|
||||
|
||||
void HeapRegionManager::commit_regions(uint index, size_t num_regions) {
|
||||
void HeapRegionManager::commit_regions(uint index, size_t num_regions, WorkGang* pretouch_gang) {
|
||||
guarantee(num_regions > 0, "Must commit more than zero regions");
|
||||
guarantee(_num_committed + num_regions <= max_length(), "Cannot commit more than the maximum amount of regions");
|
||||
|
||||
_num_committed += (uint)num_regions;
|
||||
|
||||
_heap_mapper->commit_regions(index, num_regions);
|
||||
_heap_mapper->commit_regions(index, num_regions, pretouch_gang);
|
||||
|
||||
// Also commit auxiliary data
|
||||
_prev_bitmap_mapper->commit_regions(index, num_regions);
|
||||
_next_bitmap_mapper->commit_regions(index, num_regions);
|
||||
_prev_bitmap_mapper->commit_regions(index, num_regions, pretouch_gang);
|
||||
_next_bitmap_mapper->commit_regions(index, num_regions, pretouch_gang);
|
||||
|
||||
_bot_mapper->commit_regions(index, num_regions);
|
||||
_cardtable_mapper->commit_regions(index, num_regions);
|
||||
_bot_mapper->commit_regions(index, num_regions, pretouch_gang);
|
||||
_cardtable_mapper->commit_regions(index, num_regions, pretouch_gang);
|
||||
|
||||
_card_counts_mapper->commit_regions(index, num_regions);
|
||||
_card_counts_mapper->commit_regions(index, num_regions, pretouch_gang);
|
||||
}
|
||||
|
||||
void HeapRegionManager::uncommit_regions(uint start, size_t num_regions) {
|
||||
@ -117,9 +117,9 @@ void HeapRegionManager::uncommit_regions(uint start, size_t num_regions) {
|
||||
_card_counts_mapper->uncommit_regions(start, num_regions);
|
||||
}
|
||||
|
||||
void HeapRegionManager::make_regions_available(uint start, uint num_regions) {
|
||||
void HeapRegionManager::make_regions_available(uint start, uint num_regions, WorkGang* pretouch_gang) {
|
||||
guarantee(num_regions > 0, "No point in calling this for zero regions");
|
||||
commit_regions(start, num_regions);
|
||||
commit_regions(start, num_regions, pretouch_gang);
|
||||
for (uint i = start; i < start + num_regions; i++) {
|
||||
if (_regions.get_by_index(i) == NULL) {
|
||||
HeapRegion* new_hr = new_heap_region(i);
|
||||
@ -163,11 +163,11 @@ MemoryUsage HeapRegionManager::get_auxiliary_data_memory_usage() const {
|
||||
return MemoryUsage(0, used_sz, committed_sz, committed_sz);
|
||||
}
|
||||
|
||||
uint HeapRegionManager::expand_by(uint num_regions) {
|
||||
return expand_at(0, num_regions);
|
||||
uint HeapRegionManager::expand_by(uint num_regions, WorkGang* pretouch_workers) {
|
||||
return expand_at(0, num_regions, pretouch_workers);
|
||||
}
|
||||
|
||||
uint HeapRegionManager::expand_at(uint start, uint num_regions) {
|
||||
uint HeapRegionManager::expand_at(uint start, uint num_regions, WorkGang* pretouch_workers) {
|
||||
if (num_regions == 0) {
|
||||
return 0;
|
||||
}
|
||||
@ -181,7 +181,7 @@ uint HeapRegionManager::expand_at(uint start, uint num_regions) {
|
||||
while (expanded < num_regions &&
|
||||
(num_last_found = find_unavailable_from_idx(cur, &idx_last_found)) > 0) {
|
||||
uint to_expand = MIN2(num_regions - expanded, num_last_found);
|
||||
make_regions_available(idx_last_found, to_expand);
|
||||
make_regions_available(idx_last_found, to_expand, pretouch_workers);
|
||||
expanded += to_expand;
|
||||
cur = idx_last_found + num_last_found + 1;
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2001, 2015, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2001, 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -34,6 +34,7 @@ class HeapRegion;
|
||||
class HeapRegionClosure;
|
||||
class HeapRegionClaimer;
|
||||
class FreeRegionList;
|
||||
class WorkGang;
|
||||
|
||||
class G1HeapRegionTable : public G1BiasedMappedArray<HeapRegion*> {
|
||||
protected:
|
||||
@ -94,10 +95,10 @@ class HeapRegionManager: public CHeapObj<mtGC> {
|
||||
HeapWord* heap_bottom() const { return _regions.bottom_address_mapped(); }
|
||||
HeapWord* heap_end() const {return _regions.end_address_mapped(); }
|
||||
|
||||
void make_regions_available(uint index, uint num_regions = 1);
|
||||
void make_regions_available(uint index, uint num_regions = 1, WorkGang* pretouch_gang = NULL);
|
||||
|
||||
// Pass down commit calls to the VirtualSpace.
|
||||
void commit_regions(uint index, size_t num_regions = 1);
|
||||
void commit_regions(uint index, size_t num_regions = 1, WorkGang* pretouch_gang = NULL);
|
||||
void uncommit_regions(uint index, size_t num_regions = 1);
|
||||
|
||||
// Notify other data structures about change in the heap layout.
|
||||
@ -209,12 +210,12 @@ public:
|
||||
// HeapRegions, or re-use existing ones. Returns the number of regions the
|
||||
// sequence was expanded by. If a HeapRegion allocation fails, the resulting
|
||||
// number of regions might be smaller than what's desired.
|
||||
uint expand_by(uint num_regions);
|
||||
uint expand_by(uint num_regions, WorkGang* pretouch_workers = NULL);
|
||||
|
||||
// Makes sure that the regions from start to start+num_regions-1 are available
|
||||
// for allocation. Returns the number of regions that were committed to achieve
|
||||
// this.
|
||||
uint expand_at(uint start, uint num_regions);
|
||||
uint expand_at(uint start, uint num_regions, WorkGang* pretouch_workers = NULL);
|
||||
|
||||
// Find a contiguous set of empty regions of length num. Returns the start index of
|
||||
// that set, or G1_NO_HRM_INDEX.
|
||||
|
@ -304,9 +304,6 @@ class CollectedHeap : public CHeapObj<mtInternal> {
|
||||
inline static oop array_allocate_nozero(KlassHandle klass, int size, int length, TRAPS);
|
||||
inline static oop class_allocate(KlassHandle klass, int size, TRAPS);
|
||||
|
||||
inline static void post_allocation_install_obj_klass(KlassHandle klass,
|
||||
oop obj);
|
||||
|
||||
// Raw memory allocation facilities
|
||||
// The obj and array allocate methods are covers for these methods.
|
||||
// mem_allocate() should never be
|
||||
|
@ -41,14 +41,22 @@
|
||||
// Inline allocation implementations.
|
||||
|
||||
void CollectedHeap::post_allocation_setup_common(KlassHandle klass,
|
||||
HeapWord* obj) {
|
||||
post_allocation_setup_no_klass_install(klass, obj);
|
||||
post_allocation_install_obj_klass(klass, oop(obj));
|
||||
HeapWord* obj_ptr) {
|
||||
post_allocation_setup_no_klass_install(klass, obj_ptr);
|
||||
oop obj = (oop)obj_ptr;
|
||||
#if ! INCLUDE_ALL_GCS
|
||||
obj->set_klass(klass());
|
||||
#else
|
||||
// Need a release store to ensure array/class length, mark word, and
|
||||
// object zeroing are visible before setting the klass non-NULL, for
|
||||
// concurrent collectors.
|
||||
obj->release_set_klass(klass());
|
||||
#endif
|
||||
}
|
||||
|
||||
void CollectedHeap::post_allocation_setup_no_klass_install(KlassHandle klass,
|
||||
HeapWord* objPtr) {
|
||||
oop obj = (oop)objPtr;
|
||||
HeapWord* obj_ptr) {
|
||||
oop obj = (oop)obj_ptr;
|
||||
|
||||
assert(obj != NULL, "NULL object pointer");
|
||||
if (UseBiasedLocking && (klass() != NULL)) {
|
||||
@ -59,18 +67,6 @@ void CollectedHeap::post_allocation_setup_no_klass_install(KlassHandle klass,
|
||||
}
|
||||
}
|
||||
|
||||
void CollectedHeap::post_allocation_install_obj_klass(KlassHandle klass,
|
||||
oop obj) {
|
||||
// These asserts are kind of complicated because of klassKlass
|
||||
// and the beginning of the world.
|
||||
assert(klass() != NULL || !Universe::is_fully_initialized(), "NULL klass");
|
||||
assert(klass() == NULL || klass()->is_klass(), "not a klass");
|
||||
assert(obj != NULL, "NULL object pointer");
|
||||
obj->set_klass(klass());
|
||||
assert(!Universe::is_fully_initialized() || obj->klass() != NULL,
|
||||
"missing klass");
|
||||
}
|
||||
|
||||
// Support for jvmti and dtrace
|
||||
inline void post_allocation_notify(KlassHandle klass, oop obj, int size) {
|
||||
// support low memory notifications (no-op if not enabled)
|
||||
@ -88,25 +84,26 @@ inline void post_allocation_notify(KlassHandle klass, oop obj, int size) {
|
||||
}
|
||||
|
||||
void CollectedHeap::post_allocation_setup_obj(KlassHandle klass,
|
||||
HeapWord* obj,
|
||||
HeapWord* obj_ptr,
|
||||
int size) {
|
||||
post_allocation_setup_common(klass, obj);
|
||||
post_allocation_setup_common(klass, obj_ptr);
|
||||
oop obj = (oop)obj_ptr;
|
||||
assert(Universe::is_bootstrapping() ||
|
||||
!((oop)obj)->is_array(), "must not be an array");
|
||||
!obj->is_array(), "must not be an array");
|
||||
// notify jvmti and dtrace
|
||||
post_allocation_notify(klass, (oop)obj, size);
|
||||
post_allocation_notify(klass, obj, size);
|
||||
}
|
||||
|
||||
void CollectedHeap::post_allocation_setup_class(KlassHandle klass,
|
||||
HeapWord* obj,
|
||||
HeapWord* obj_ptr,
|
||||
int size) {
|
||||
// Set oop_size field before setting the _klass field
|
||||
// in post_allocation_setup_common() because the klass field
|
||||
// indicates that the object is parsable by concurrent GC.
|
||||
oop new_cls = (oop)obj;
|
||||
// Set oop_size field before setting the _klass field because a
|
||||
// non-NULL _klass field indicates that the object is parsable by
|
||||
// concurrent GC.
|
||||
oop new_cls = (oop)obj_ptr;
|
||||
assert(size > 0, "oop_size must be positive.");
|
||||
java_lang_Class::set_oop_size(new_cls, size);
|
||||
post_allocation_setup_common(klass, obj);
|
||||
post_allocation_setup_common(klass, obj_ptr);
|
||||
assert(Universe::is_bootstrapping() ||
|
||||
!new_cls->is_array(), "must not be an array");
|
||||
// notify jvmti and dtrace
|
||||
@ -114,15 +111,15 @@ void CollectedHeap::post_allocation_setup_class(KlassHandle klass,
|
||||
}
|
||||
|
||||
void CollectedHeap::post_allocation_setup_array(KlassHandle klass,
|
||||
HeapWord* obj,
|
||||
HeapWord* obj_ptr,
|
||||
int length) {
|
||||
// Set array length before setting the _klass field
|
||||
// in post_allocation_setup_common() because the klass field
|
||||
// indicates that the object is parsable by concurrent GC.
|
||||
// Set array length before setting the _klass field because a
|
||||
// non-NULL klass field indicates that the object is parsable by
|
||||
// concurrent GC.
|
||||
assert(length >= 0, "length should be non-negative");
|
||||
((arrayOop)obj)->set_length(length);
|
||||
post_allocation_setup_common(klass, obj);
|
||||
oop new_obj = (oop)obj;
|
||||
((arrayOop)obj_ptr)->set_length(length);
|
||||
post_allocation_setup_common(klass, obj_ptr);
|
||||
oop new_obj = (oop)obj_ptr;
|
||||
assert(new_obj->is_array(), "must be an array");
|
||||
// notify jvmti and dtrace (must be after length is set for dtrace)
|
||||
post_allocation_notify(klass, new_obj, new_obj->size());
|
||||
|
@ -62,7 +62,12 @@ class AbstractGangTask VALUE_OBJ_CLASS_SPEC {
|
||||
AbstractGangTask(const char* name) :
|
||||
_name(name),
|
||||
_gc_id(GCId::current_raw())
|
||||
{}
|
||||
{}
|
||||
|
||||
AbstractGangTask(const char* name, const uint gc_id) :
|
||||
_name(name),
|
||||
_gc_id(gc_id)
|
||||
{}
|
||||
|
||||
// The abstract work method.
|
||||
// The argument tells you which member of the gang you are.
|
||||
|
@ -738,6 +738,7 @@ class MmapArrayAllocator : public AllStatic {
|
||||
static size_t size_for(size_t length);
|
||||
|
||||
public:
|
||||
static E* allocate_or_null(size_t length);
|
||||
static E* allocate(size_t length);
|
||||
static void free(E* addr, size_t length);
|
||||
};
|
||||
|
@ -152,6 +152,24 @@ size_t MmapArrayAllocator<E, F>::size_for(size_t length) {
|
||||
return align_size_up(size, alignment);
|
||||
}
|
||||
|
||||
template <class E, MEMFLAGS F>
|
||||
E* MmapArrayAllocator<E, F>::allocate_or_null(size_t length) {
|
||||
size_t size = size_for(length);
|
||||
int alignment = os::vm_allocation_granularity();
|
||||
|
||||
char* addr = os::reserve_memory(size, NULL, alignment, F);
|
||||
if (addr == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (os::commit_memory(addr, size, !ExecMem, "Allocator (commit)")) {
|
||||
return (E*)addr;
|
||||
} else {
|
||||
os::release_memory(addr, size);
|
||||
return NULL;
|
||||
}
|
||||
}
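
Editor's illustration (not the HotSpot os:: API): allocate_or_null above reserves address space, tries to commit it, and releases the reservation again if the commit fails. A standalone POSIX sketch of the same reserve/commit/release-on-failure pattern, assuming mmap/mprotect availability:

    #include <cstddef>
    #include <sys/mman.h>

    static void* reserve_then_commit_or_null(size_t size) {
      // Reserve address space without backing it (analogous to os::reserve_memory).
      void* addr = mmap(NULL, size, PROT_NONE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (addr == MAP_FAILED) {
        return NULL;
      }
      // "Commit" by making the range accessible (analogous to os::commit_memory).
      if (mprotect(addr, size, PROT_READ | PROT_WRITE) != 0) {
        munmap(addr, size);   // release the reservation on failure, as the code above does
        return NULL;
      }
      return addr;
    }

Returning NULL instead of aborting is what distinguishes allocate_or_null from the pre-existing allocate in this diff.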
|
||||
|
||||
template <class E, MEMFLAGS F>
|
||||
E* MmapArrayAllocator<E, F>::allocate(size_t length) {
|
||||
size_t size = size_for(length);
|
||||
|
@ -649,7 +649,7 @@ ReservedSpace FileMapInfo::reserve_shared_memory() {
|
||||
|
||||
// Memory map a region in the address space.
|
||||
static const char* shared_region_name[] = { "ReadOnly", "ReadWrite", "MiscData", "MiscCode",
|
||||
"String1", "String2" };
|
||||
"String1", "String2", "OptionalData" };
|
||||
|
||||
char* FileMapInfo::map_region(int i) {
|
||||
assert(!MetaspaceShared::is_string_region(i), "sanity");
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2003, 2015, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -252,10 +252,27 @@ public:
|
||||
bool is_in_shared_region(const void* p, int idx) NOT_CDS_RETURN_(false);
|
||||
void print_shared_spaces() NOT_CDS_RETURN;
|
||||
|
||||
// The ro+rw+md+mc spaces size
|
||||
static size_t core_spaces_size() {
|
||||
return align_size_up((SharedReadOnlySize + SharedReadWriteSize +
|
||||
SharedMiscDataSize + SharedMiscCodeSize),
|
||||
os::vm_allocation_granularity());
|
||||
}
|
||||
|
||||
// The estimated optional space size.
|
||||
//
|
||||
// Currently the optional space only has archived class bytes.
|
||||
// The core_spaces_size is the size of all class metadata, which is a good
|
||||
// estimate of the total class bytes to be archived. Only the portion
|
||||
// containing data is written out to the archive and mapped at runtime.
|
||||
// There is no memory waste due to unused portion in optional space.
|
||||
static size_t optional_space_size() {
|
||||
return core_spaces_size();
|
||||
}
|
||||
|
||||
// Total shared_spaces size includes the ro, rw, md, mc and od spaces
|
||||
static size_t shared_spaces_size() {
|
||||
return align_size_up(SharedReadOnlySize + SharedReadWriteSize +
|
||||
SharedMiscDataSize + SharedMiscCodeSize,
|
||||
os::vm_allocation_granularity());
|
||||
return core_spaces_size() + optional_space_size();
|
||||
}
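
Editor's illustration (assumed values, not JDK defaults): the total shared space is now the aligned core size (ro+rw+md+mc) plus an optional (od) space estimated at the same size. A small standalone C++ example of that arithmetic; all Shared*Size values and the allocation granularity below are assumptions for illustration only:

    #include <cstddef>

    static size_t align_up(size_t v, size_t a) { return (v + a - 1) / a * a; }

    static size_t example_shared_spaces_size() {
      const size_t ro = 16 * 1024 * 1024;   // SharedReadOnlySize  (assumed)
      const size_t rw = 16 * 1024 * 1024;   // SharedReadWriteSize (assumed)
      const size_t md =  4 * 1024 * 1024;   // SharedMiscDataSize  (assumed)
      const size_t mc =  2 * 1024 * 1024;   // SharedMiscCodeSize  (assumed)
      const size_t granularity = 64 * 1024; // os::vm_allocation_granularity() (assumed)
      size_t core     = align_up(ro + rw + md + mc, granularity);
      size_t optional = core;               // od is estimated at the core size
      return core + optional;               // mirrors core_spaces_size() + optional_space_size()
    }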
|
||||
|
||||
// Stop CDS sharing and unmap CDS regions.
|
||||
|
@ -3172,36 +3172,28 @@ void Metaspace::global_initialize() {
|
||||
address cds_address = NULL;
|
||||
FileMapInfo* mapinfo = new FileMapInfo();
|
||||
|
||||
if (JvmtiExport::should_post_class_file_load_hook()) {
|
||||
// Currently CDS does not support JVMTI CFLH when loading shared class.
|
||||
// If JvmtiExport::should_post_class_file_load_hook is already enabled,
|
||||
// just disable UseSharedSpaces.
|
||||
FileMapInfo::fail_continue("Tool agent requires sharing to be disabled.");
|
||||
delete mapinfo;
|
||||
} else {
|
||||
// Open the shared archive file, read and validate the header. If
|
||||
// initialization fails, shared spaces [UseSharedSpaces] are
|
||||
// disabled and the file is closed.
|
||||
// Map in spaces now also
|
||||
if (mapinfo->initialize() && MetaspaceShared::map_shared_spaces(mapinfo)) {
|
||||
cds_total = FileMapInfo::shared_spaces_size();
|
||||
cds_address = (address)mapinfo->header()->region_addr(0);
|
||||
// Open the shared archive file, read and validate the header. If
|
||||
// initialization fails, shared spaces [UseSharedSpaces] are
|
||||
// disabled and the file is closed.
|
||||
// Map in spaces now also
|
||||
if (mapinfo->initialize() && MetaspaceShared::map_shared_spaces(mapinfo)) {
|
||||
cds_total = FileMapInfo::shared_spaces_size();
|
||||
cds_address = (address)mapinfo->header()->region_addr(0);
|
||||
#ifdef _LP64
|
||||
if (using_class_space()) {
|
||||
char* cds_end = (char*)(cds_address + cds_total);
|
||||
cds_end = (char *)align_ptr_up(cds_end, _reserve_alignment);
|
||||
// If UseCompressedClassPointers is set then allocate the metaspace area
|
||||
// above the heap and above the CDS area (if it exists).
|
||||
allocate_metaspace_compressed_klass_ptrs(cds_end, cds_address);
|
||||
// Map the shared string space after compressed pointers
|
||||
// because it relies on compressed class pointers setting to work
|
||||
mapinfo->map_string_regions();
|
||||
}
|
||||
#endif // _LP64
|
||||
} else {
|
||||
assert(!mapinfo->is_open() && !UseSharedSpaces,
|
||||
"archive file not closed or shared spaces not disabled.");
|
||||
if (using_class_space()) {
|
||||
char* cds_end = (char*)(cds_address + cds_total);
|
||||
cds_end = (char *)align_ptr_up(cds_end, _reserve_alignment);
|
||||
// If UseCompressedClassPointers is set then allocate the metaspace area
|
||||
// above the heap and above the CDS area (if it exists).
|
||||
allocate_metaspace_compressed_klass_ptrs(cds_end, cds_address);
|
||||
// Map the shared string space after compressed pointers
|
||||
// because it relies on compressed class pointers setting to work
|
||||
mapinfo->map_string_regions();
|
||||
}
|
||||
#endif // _LP64
|
||||
} else {
|
||||
assert(!mapinfo->is_open() && !UseSharedSpaces,
|
||||
"archive file not closed or shared spaces not disabled.");
|
||||
}
|
||||
}
|
||||
#endif // INCLUDE_CDS
|
||||
|
@ -65,6 +65,7 @@ address MetaspaceShared::_cds_i2i_entry_code_buffers = NULL;
|
||||
size_t MetaspaceShared::_cds_i2i_entry_code_buffers_size = 0;
|
||||
SharedMiscRegion MetaspaceShared::_mc;
|
||||
SharedMiscRegion MetaspaceShared::_md;
|
||||
SharedMiscRegion MetaspaceShared::_od;
|
||||
|
||||
void SharedMiscRegion::initialize(ReservedSpace rs, size_t committed_byte_size, SharedSpaceType space_type) {
|
||||
_vs.initialize(rs, committed_byte_size);
|
||||
@ -93,16 +94,24 @@ void MetaspaceShared::initialize_shared_rs(ReservedSpace* rs) {
|
||||
assert(DumpSharedSpaces, "dump time only");
|
||||
_shared_rs = rs;
|
||||
|
||||
// Split up and initialize the misc code and data spaces
|
||||
size_t core_spaces_size = FileMapInfo::core_spaces_size();
|
||||
size_t metadata_size = SharedReadOnlySize + SharedReadWriteSize;
|
||||
ReservedSpace shared_ro_rw = _shared_rs->first_part(metadata_size);
|
||||
ReservedSpace misc_section = _shared_rs->last_part(metadata_size);
|
||||
|
||||
// Now split into misc sections.
|
||||
// Split into the core and optional sections
|
||||
ReservedSpace core_data = _shared_rs->first_part(core_spaces_size);
|
||||
ReservedSpace optional_data = _shared_rs->last_part(core_spaces_size);
|
||||
|
||||
// The RO/RW and the misc sections
|
||||
ReservedSpace shared_ro_rw = core_data.first_part(metadata_size);
|
||||
ReservedSpace misc_section = core_data.last_part(metadata_size);
|
||||
|
||||
// Now split the misc code and misc data sections.
|
||||
ReservedSpace md_rs = misc_section.first_part(SharedMiscDataSize);
|
||||
ReservedSpace mc_rs = misc_section.last_part(SharedMiscDataSize);
|
||||
|
||||
_md.initialize(md_rs, SharedMiscDataSize, SharedMiscData);
|
||||
_mc.initialize(mc_rs, SharedMiscCodeSize, SharedMiscData);
|
||||
_mc.initialize(mc_rs, SharedMiscCodeSize, SharedMiscCode);
|
||||
_od.initialize(optional_data, metadata_size, SharedOptional);
|
||||
}
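
Editor's illustration (not HotSpot code): initialize_shared_rs above carves one reservation by repeated first_part/last_part splits into core (ro+rw, md, mc) and optional (od) sections. A standalone C++ sketch expressing the same splitting order as offsets into a flat buffer; the Part struct and parameter names are hypothetical:

    #include <cstddef>

    struct Part { size_t offset; size_t size; };

    static void split_shared_reservation(size_t total, size_t metadata_size,
                                         size_t misc_data_size, size_t core_size,
                                         Part& ro_rw, Part& md, Part& mc, Part& od) {
      // First split: core spaces vs. optional data.
      Part core = { 0, core_size };
      od        = { core_size, total - core_size };
      // Second split inside core: RO/RW metadata vs. the misc section.
      ro_rw     = { core.offset, metadata_size };
      Part misc = { core.offset + metadata_size, core.size - metadata_size };
      // Third split inside misc: misc data vs. misc code.
      md        = { misc.offset, misc_data_size };
      mc        = { misc.offset + misc_data_size, misc.size - misc_data_size };
    }

The resulting layout is [ ro+rw | md | mc | od ], matching the order in which the regions are later written to the archive.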
|
||||
|
||||
// Read/write a data stream for restoring/preserving metadata pointers and
|
||||
@ -521,6 +530,7 @@ private:
|
||||
GrowableArray<Klass*> *_class_promote_order;
|
||||
VirtualSpace _md_vs;
|
||||
VirtualSpace _mc_vs;
|
||||
VirtualSpace _od_vs;
|
||||
GrowableArray<MemRegion> *_string_regions;
|
||||
|
||||
public:
|
||||
@ -598,15 +608,19 @@ void VM_PopulateDumpSharedSpace::doit() {
|
||||
remove_unshareable_in_classes();
|
||||
tty->print_cr("done. ");
|
||||
|
||||
// Set up the share data and shared code segments.
|
||||
// Set up the misc data, misc code and optional data segments.
|
||||
_md_vs = *MetaspaceShared::misc_data_region()->virtual_space();
|
||||
_mc_vs = *MetaspaceShared::misc_code_region()->virtual_space();
|
||||
_od_vs = *MetaspaceShared::optional_data_region()->virtual_space();
|
||||
char* md_low = _md_vs.low();
|
||||
char* md_top = MetaspaceShared::misc_data_region()->alloc_top();
|
||||
char* md_end = _md_vs.high();
|
||||
char* mc_low = _mc_vs.low();
|
||||
char* mc_top = MetaspaceShared::misc_code_region()->alloc_top();
|
||||
char* mc_end = _mc_vs.high();
|
||||
char* od_low = _od_vs.low();
|
||||
char* od_top = MetaspaceShared::optional_data_region()->alloc_top();
|
||||
char* od_end = _od_vs.high();
|
||||
|
||||
// Reserve space for the list of Klass*s whose vtables are used
|
||||
// for patching others as needed.
|
||||
@ -661,28 +675,32 @@ void VM_PopulateDumpSharedSpace::doit() {
|
||||
const size_t rw_alloced = rw_space->capacity_bytes_slow(Metaspace::NonClassType);
|
||||
const size_t md_alloced = md_end-md_low;
|
||||
const size_t mc_alloced = mc_end-mc_low;
|
||||
const size_t od_alloced = od_end-od_low;
|
||||
const size_t total_alloced = ro_alloced + rw_alloced + md_alloced + mc_alloced
|
||||
+ ss_bytes;
|
||||
+ ss_bytes + od_alloced;
|
||||
|
||||
// Occupied size of each space.
|
||||
const size_t ro_bytes = ro_space->used_bytes_slow(Metaspace::NonClassType);
|
||||
const size_t rw_bytes = rw_space->used_bytes_slow(Metaspace::NonClassType);
|
||||
const size_t md_bytes = size_t(md_top - md_low);
|
||||
const size_t mc_bytes = size_t(mc_top - mc_low);
|
||||
const size_t od_bytes = size_t(od_top - od_low);
|
||||
|
||||
// Percent of total size
|
||||
const size_t total_bytes = ro_bytes + rw_bytes + md_bytes + mc_bytes + ss_bytes;
|
||||
const size_t total_bytes = ro_bytes + rw_bytes + md_bytes + mc_bytes + ss_bytes + od_bytes;
|
||||
const double ro_t_perc = ro_bytes / double(total_bytes) * 100.0;
|
||||
const double rw_t_perc = rw_bytes / double(total_bytes) * 100.0;
|
||||
const double md_t_perc = md_bytes / double(total_bytes) * 100.0;
|
||||
const double mc_t_perc = mc_bytes / double(total_bytes) * 100.0;
|
||||
const double ss_t_perc = ss_bytes / double(total_bytes) * 100.0;
|
||||
const double od_t_perc = od_bytes / double(total_bytes) * 100.0;
|
||||
|
||||
// Percent of fullness of each space
|
||||
const double ro_u_perc = ro_bytes / double(ro_alloced) * 100.0;
|
||||
const double rw_u_perc = rw_bytes / double(rw_alloced) * 100.0;
|
||||
const double md_u_perc = md_bytes / double(md_alloced) * 100.0;
|
||||
const double mc_u_perc = mc_bytes / double(mc_alloced) * 100.0;
|
||||
const double od_u_perc = od_bytes / double(od_alloced) * 100.0;
|
||||
const double total_u_perc = total_bytes / double(total_alloced) * 100.0;
|
||||
|
||||
#define fmt_space "%s space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used] at " INTPTR_FORMAT
|
||||
@ -691,6 +709,7 @@ void VM_PopulateDumpSharedSpace::doit() {
|
||||
tty->print_cr(fmt_space, "md", md_bytes, md_t_perc, md_alloced, md_u_perc, p2i(md_low));
|
||||
tty->print_cr(fmt_space, "mc", mc_bytes, mc_t_perc, mc_alloced, mc_u_perc, p2i(mc_low));
|
||||
tty->print_cr(fmt_space, "st", ss_bytes, ss_t_perc, ss_bytes, 100.0, p2i(ss_low));
|
||||
tty->print_cr(fmt_space, "od", od_bytes, od_t_perc, od_alloced, od_u_perc, p2i(od_low));
|
||||
tty->print_cr("total : " SIZE_FORMAT_W(9) " [100.0%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used]",
|
||||
total_bytes, total_alloced, total_u_perc);
|
||||
|
||||
@ -734,6 +753,10 @@ void VM_PopulateDumpSharedSpace::doit() {
|
||||
SharedMiscCodeSize,
|
||||
true, true);
|
||||
mapinfo->write_string_regions(_string_regions);
|
||||
mapinfo->write_region(MetaspaceShared::od, _od_vs.low(),
|
||||
pointer_delta(od_top, _od_vs.low(), sizeof(char)),
|
||||
pointer_delta(od_end, _od_vs.low(), sizeof(char)),
|
||||
true, false);
|
||||
}
|
||||
|
||||
mapinfo->close();
|
||||
@ -1049,8 +1072,6 @@ void MetaspaceShared::print_shared_spaces() {
|
||||
|
||||
|
||||
// Map shared spaces at requested addresses and return if succeeded.
|
||||
// Need to keep the bounds of the ro and rw space for the Metaspace::contains
|
||||
// call, or is_in_shared_space.
|
||||
bool MetaspaceShared::map_shared_spaces(FileMapInfo* mapinfo) {
|
||||
size_t image_alignment = mapinfo->alignment();
|
||||
|
||||
@ -1068,6 +1089,7 @@ bool MetaspaceShared::map_shared_spaces(FileMapInfo* mapinfo) {
|
||||
char* _rw_base = NULL;
|
||||
char* _md_base = NULL;
|
||||
char* _mc_base = NULL;
|
||||
char* _od_base = NULL;
|
||||
|
||||
// Map each shared region
|
||||
if ((_ro_base = mapinfo->map_region(ro)) != NULL &&
|
||||
@ -1078,6 +1100,8 @@ bool MetaspaceShared::map_shared_spaces(FileMapInfo* mapinfo) {
|
||||
mapinfo->verify_region_checksum(md) &&
|
||||
(_mc_base = mapinfo->map_region(mc)) != NULL &&
|
||||
mapinfo->verify_region_checksum(mc) &&
|
||||
(_od_base = mapinfo->map_region(od)) != NULL &&
|
||||
mapinfo->verify_region_checksum(od) &&
|
||||
(image_alignment == (size_t)max_alignment()) &&
|
||||
mapinfo->validate_classpath_entry_table()) {
|
||||
// Success (no need to do anything)
|
||||
@ -1089,6 +1113,7 @@ bool MetaspaceShared::map_shared_spaces(FileMapInfo* mapinfo) {
|
||||
if (_rw_base != NULL) mapinfo->unmap_region(rw);
|
||||
if (_md_base != NULL) mapinfo->unmap_region(md);
|
||||
if (_mc_base != NULL) mapinfo->unmap_region(mc);
|
||||
if (_od_base != NULL) mapinfo->unmap_region(od);
|
||||
#ifndef _WINDOWS
|
||||
// Release the entire mapped region
|
||||
shared_rs.release();
|
||||
|
@ -132,6 +132,7 @@ class MetaspaceShared : AllStatic {
|
||||
// Used only during dumping.
|
||||
static SharedMiscRegion _md;
|
||||
static SharedMiscRegion _mc;
|
||||
static SharedMiscRegion _od;
|
||||
public:
|
||||
enum {
|
||||
vtbl_list_size = DEFAULT_VTBL_LIST_SIZE,
|
||||
@ -148,7 +149,10 @@ class MetaspaceShared : AllStatic {
|
||||
max_strings = 2, // max number of string regions in string space
|
||||
num_non_strings = 4, // number of non-string regions
|
||||
first_string = num_non_strings, // index of first string region
|
||||
n_regions = max_strings + num_non_strings // total number of regions
|
||||
// The optional data region is the last region.
|
||||
// Currently it only contains class file data.
|
||||
od = max_strings + num_non_strings,
|
||||
n_regions = od + 1 // total number of regions
|
||||
};
|
||||
|
||||
// Accessor functions to save shared space created for metadata, which has
|
||||
@ -222,9 +226,10 @@ class MetaspaceShared : AllStatic {
|
||||
static int count_class(const char* classlist_file);
|
||||
static void estimate_regions_size() NOT_CDS_RETURN;
|
||||
|
||||
// Allocate a block of memory from the "mc" or "md" regions.
|
||||
// Allocate a block of memory from the "mc", "md", or "od" regions.
|
||||
static char* misc_code_space_alloc(size_t num_bytes) { return _mc.alloc(num_bytes); }
|
||||
static char* misc_data_space_alloc(size_t num_bytes) { return _md.alloc(num_bytes); }
|
||||
static char* optional_data_space_alloc(size_t num_bytes) { return _od.alloc(num_bytes); }
|
||||
|
||||
static address cds_i2i_entry_code_buffers(size_t total_size);
|
||||
|
||||
@ -243,5 +248,9 @@ class MetaspaceShared : AllStatic {
|
||||
assert(DumpSharedSpaces, "used during dumping only");
|
||||
return &_md;
|
||||
}
|
||||
static SharedMiscRegion* optional_data_region() {
|
||||
assert(DumpSharedSpaces, "used during dumping only");
|
||||
return &_od;
|
||||
}
|
||||
};
|
||||
#endif // SHARE_VM_MEMORY_METASPACESHARED_HPP
|
||||
|
@ -41,6 +41,7 @@
|
||||
#include "memory/heapInspection.hpp"
|
||||
#include "memory/iterator.inline.hpp"
|
||||
#include "memory/metadataFactory.hpp"
|
||||
#include "memory/metaspaceShared.hpp"
|
||||
#include "memory/oopFactory.hpp"
|
||||
#include "memory/resourceArea.hpp"
|
||||
#include "oops/fieldStreams.hpp"
|
||||
@ -1972,11 +1973,6 @@ void InstanceKlass::remove_unshareable_info() {
|
||||
m->remove_unshareable_info();
|
||||
}
|
||||
|
||||
// cached_class_file might be pointing to a malloc'ed buffer allocated by
|
||||
// event-based tracing code at CDS dump time. It's not usable at runtime
|
||||
// so let's clear it.
|
||||
set_cached_class_file(NULL);
|
||||
|
||||
// do array classes also.
|
||||
array_klasses_do(remove_unshareable_in_class);
|
||||
}
|
||||
@ -2070,6 +2066,7 @@ void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) {
|
||||
}
|
||||
|
||||
void InstanceKlass::release_C_heap_structures() {
|
||||
assert(!this->is_shared(), "should not be called for a shared class");
|
||||
|
||||
// Can't release the constant pool here because the constant pool can be
|
||||
// deallocated separately from the InstanceKlass for default methods and
|
||||
@ -2250,8 +2247,8 @@ void InstanceKlass::set_package(ClassLoaderData* loader_data, TRAPS) {
|
||||
// the java.base module. If a non-java.base package is erroneously placed
|
||||
// in the java.base module it will be caught later when java.base
|
||||
// is defined by ModuleEntryTable::verify_javabase_packages check.
|
||||
assert(ModuleEntryTable::javabase_module() != NULL, "java.base module is NULL");
|
||||
_package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_module());
|
||||
assert(ModuleEntryTable::javabase_moduleEntry() != NULL, "java.base module is NULL");
|
||||
_package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry());
|
||||
} else {
|
||||
assert(loader_data->modules()->unnamed_module() != NULL, "unnamed module is NULL");
|
||||
_package_entry = loader_data->packages()->lookup(pkg_name,
|
||||
@ -3653,6 +3650,15 @@ Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
|
||||
}
|
||||
|
||||
#if INCLUDE_JVMTI
|
||||
JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
|
||||
if (MetaspaceShared::is_in_shared_space(_cached_class_file)) {
|
||||
// Ignore the archived class stream data
|
||||
return NULL;
|
||||
} else {
|
||||
return _cached_class_file;
|
||||
}
|
||||
}
|
||||
|
||||
jint InstanceKlass::get_cached_class_file_len() {
|
||||
return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
|
||||
}
|
||||
@ -3660,4 +3666,15 @@ jint InstanceKlass::get_cached_class_file_len() {
|
||||
unsigned char * InstanceKlass::get_cached_class_file_bytes() {
|
||||
return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
|
||||
}
|
||||
|
||||
#if INCLUDE_CDS
|
||||
JvmtiCachedClassFileData* InstanceKlass::get_archived_class_data() {
|
||||
assert(this->is_shared(), "class should be shared");
|
||||
if (MetaspaceShared::is_in_shared_space(_cached_class_file)) {
|
||||
return _cached_class_file;
|
||||
} else {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
|
@ -783,7 +783,7 @@ public:
|
||||
void set_cached_class_file(JvmtiCachedClassFileData *data) {
|
||||
_cached_class_file = data;
|
||||
}
|
||||
JvmtiCachedClassFileData * get_cached_class_file() { return _cached_class_file; }
|
||||
JvmtiCachedClassFileData * get_cached_class_file();
|
||||
jint get_cached_class_file_len();
|
||||
unsigned char * get_cached_class_file_bytes();
|
||||
|
||||
@ -795,6 +795,13 @@ public:
|
||||
return _jvmti_cached_class_field_map;
|
||||
}
|
||||
|
||||
#if INCLUDE_CDS
|
||||
void set_archived_class_data(JvmtiCachedClassFileData* data) {
|
||||
_cached_class_file = data;
|
||||
}
|
||||
|
||||
JvmtiCachedClassFileData * get_archived_class_data();
|
||||
#endif // INCLUDE_CDS
|
||||
#else // INCLUDE_JVMTI
|
||||
|
||||
static void purge_previous_versions(InstanceKlass* ik) { return; };
|
||||
|
@ -530,7 +530,7 @@ void Klass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protec
|
||||
InstanceKlass* ik = (InstanceKlass*) k;
|
||||
module_entry = ik->module();
|
||||
} else {
|
||||
module_entry = ModuleEntryTable::javabase_module();
|
||||
module_entry = ModuleEntryTable::javabase_moduleEntry();
|
||||
}
|
||||
// Obtain java.lang.reflect.Module, if available
|
||||
Handle module_handle(THREAD, ((module_entry != NULL) ? JNIHandles::resolve(module_entry->module()) : (oop)NULL));
|
||||
|
@ -87,6 +87,7 @@ class oopDesc {
|
||||
inline narrowKlass* compressed_klass_addr();
|
||||
|
||||
inline void set_klass(Klass* k);
|
||||
inline void release_set_klass(Klass* k);
|
||||
|
||||
// For klass field compression
|
||||
inline int klass_gap() const;
|
||||
|
@ -129,10 +129,14 @@ narrowKlass* oopDesc::compressed_klass_addr() {
|
||||
return &_metadata._compressed_klass;
|
||||
}
|
||||
|
||||
#define CHECK_SET_KLASS(k) \
|
||||
do { \
|
||||
assert(Universe::is_bootstrapping() || k != NULL, "NULL Klass"); \
|
||||
assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass"); \
|
||||
} while (0)
|
||||
|
||||
void oopDesc::set_klass(Klass* k) {
|
||||
// since klasses are promoted no store check is needed
|
||||
assert(Universe::is_bootstrapping() || k != NULL, "must be a real Klass*");
|
||||
assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass*");
|
||||
CHECK_SET_KLASS(k);
|
||||
if (UseCompressedClassPointers) {
|
||||
*compressed_klass_addr() = Klass::encode_klass_not_null(k);
|
||||
} else {
|
||||
@ -140,6 +144,18 @@ void oopDesc::set_klass(Klass* k) {
|
||||
}
|
||||
}
|
||||
|
||||
void oopDesc::release_set_klass(Klass* k) {
|
||||
CHECK_SET_KLASS(k);
|
||||
if (UseCompressedClassPointers) {
|
||||
OrderAccess::release_store(compressed_klass_addr(),
|
||||
Klass::encode_klass_not_null(k));
|
||||
} else {
|
||||
OrderAccess::release_store_ptr(klass_addr(), k);
|
||||
}
|
||||
}
|
||||
|
||||
#undef CHECK_SET_KLASS
|
||||
|
||||
int oopDesc::klass_gap() const {
|
||||
return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes());
|
||||
}
|
||||
|
@ -72,7 +72,7 @@ TypeArrayKlass* TypeArrayKlass::create_klass(BasicType type,
|
||||
null_loader_data->add_class(ak);
|
||||
|
||||
// Call complete_create_array_klass after all instance variables have been initialized.
|
||||
complete_create_array_klass(ak, ak->super(), ModuleEntryTable::javabase_module(), CHECK_NULL);
|
||||
complete_create_array_klass(ak, ak->super(), ModuleEntryTable::javabase_moduleEntry(), CHECK_NULL);
|
||||
|
||||
return ak;
|
||||
}
|
||||
@ -347,7 +347,7 @@ const char* TypeArrayKlass::internal_name() const {
|
||||
|
||||
// A TypeArrayKlass is an array of a primitive type, its defining module is java.base
|
||||
ModuleEntry* TypeArrayKlass::module() const {
|
||||
return ModuleEntryTable::javabase_module();
|
||||
return ModuleEntryTable::javabase_moduleEntry();
|
||||
}
|
||||
|
||||
PackageEntry* TypeArrayKlass::package() const {
|
||||
|
@ -272,6 +272,31 @@ public:
|
||||
|
||||
// Get/PutObject must be special-cased, since it works with handles.
|
||||
|
||||
// We could be accessing the referent field in a reference
|
||||
// object. If G1 is enabled then we need to register non-null
|
||||
// referent with the SATB barrier.
|
||||
|
||||
#if INCLUDE_ALL_GCS
|
||||
static bool is_java_lang_ref_Reference_access(oop o, jlong offset) {
|
||||
if (offset == java_lang_ref_Reference::referent_offset && o != NULL) {
|
||||
Klass* k = o->klass();
|
||||
if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
|
||||
assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
static void ensure_satb_referent_alive(oop o, jlong offset, oop v) {
|
||||
#if INCLUDE_ALL_GCS
|
||||
if (UseG1GC && v != NULL && is_java_lang_ref_Reference_access(o, offset)) {
|
||||
G1SATBCardTableModRefBS::enqueue(v);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
// These functions allow a null base pointer with an arbitrary address.
|
||||
// But if the base pointer is non-null, the offset should make some sense.
|
||||
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
|
||||
@ -286,34 +311,9 @@ UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj,
|
||||
v = *(oop*)index_oop_from_field_offset_long(p, offset);
|
||||
}
|
||||
|
||||
jobject ret = JNIHandles::make_local(env, v);
|
||||
ensure_satb_referent_alive(p, offset, v);
|
||||
|
||||
#if INCLUDE_ALL_GCS
|
||||
// We could be accessing the referent field in a reference
|
||||
// object. If G1 is enabled then we need to register non-null
|
||||
// referent with the SATB barrier.
|
||||
if (UseG1GC) {
|
||||
bool needs_barrier = false;
|
||||
|
||||
if (ret != NULL) {
|
||||
if (offset == java_lang_ref_Reference::referent_offset && obj != NULL) {
|
||||
oop o = JNIHandles::resolve(obj);
|
||||
Klass* k = o->klass();
|
||||
if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
|
||||
assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
|
||||
needs_barrier = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (needs_barrier) {
|
||||
oop referent = JNIHandles::resolve(ret);
|
||||
G1SATBCardTableModRefBS::enqueue(referent);
|
||||
}
|
||||
}
|
||||
#endif // INCLUDE_ALL_GCS
|
||||
|
||||
return ret;
|
||||
return JNIHandles::make_local(env, v);
|
||||
} UNSAFE_END
|
||||
|
||||
UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
|
||||
@ -344,6 +344,8 @@ UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobj
|
||||
(void)const_cast<oop&>(v = *(volatile oop*) addr);
|
||||
}
|
||||
|
||||
ensure_satb_referent_alive(p, offset, v);
|
||||
|
||||
OrderAccess::acquire();
|
||||
return JNIHandles::make_local(env, v);
|
||||
} UNSAFE_END
|
||||
|
@ -1596,6 +1596,10 @@ public:
product(bool, AlwaysPreTouch, false, \
"Force all freshly committed pages to be pre-touched") \
\
product(size_t, PreTouchParallelChunkSize, 1 * G, \
"Per-thread chunk size for parallel memory pre-touch.") \
range(1, SIZE_MAX / 2) \
\
product_pd(size_t, CMSYoungGenPerWorker, \
"The maximum size of young gen chosen by default per GC worker " \
"thread available") \
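Note: the new PreTouchParallelChunkSize flag above bounds how much memory each worker thread pre-touches at a time when AlwaysPreTouch is enabled. As a rough, hedged sketch of how such a flag could be exercised from a Java test (the chunk value and the driver class name are illustrative, not part of this change; ProcessTools/OutputAnalyzer are the same test-library helpers used elsewhere in this commit):

import jdk.test.lib.process.OutputAnalyzer;
import jdk.test.lib.process.ProcessTools;

public class PreTouchFlagSketch {
    public static void main(String[] args) throws Exception {
        // Launch a child JVM that pre-touches the heap in 512m chunks per worker
        // thread; the value 512m is arbitrary and only serves as an example.
        ProcessBuilder pb = ProcessTools.createJavaProcessBuilder(true,
                "-XX:+AlwaysPreTouch",
                "-XX:PreTouchParallelChunkSize=512m",
                "-version");
        new OutputAnalyzer(pb.start()).shouldHaveExitValue(0);
    }
}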
@ -77,6 +77,8 @@ Mutex* Shared_SATB_Q_lock = NULL;
|
||||
Mutex* DirtyCardQ_FL_lock = NULL;
|
||||
Monitor* DirtyCardQ_CBL_mon = NULL;
|
||||
Mutex* Shared_DirtyCardQ_lock = NULL;
|
||||
Mutex* MarkStackFreeList_lock = NULL;
|
||||
Mutex* MarkStackChunkList_lock = NULL;
|
||||
Mutex* ParGCRareEvent_lock = NULL;
|
||||
Mutex* DerivedPointerTableGC_lock = NULL;
|
||||
Mutex* Compile_lock = NULL;
|
||||
@ -194,6 +196,9 @@ void mutex_init() {
|
||||
|
||||
def(StringDedupQueue_lock , Monitor, leaf, true, Monitor::_safepoint_check_never);
|
||||
def(StringDedupTable_lock , Mutex , leaf, true, Monitor::_safepoint_check_never);
|
||||
|
||||
def(MarkStackFreeList_lock , Mutex , leaf , true, Monitor::_safepoint_check_never);
|
||||
def(MarkStackChunkList_lock , Mutex , leaf , true, Monitor::_safepoint_check_never);
|
||||
}
|
||||
def(ParGCRareEvent_lock , Mutex , leaf , true, Monitor::_safepoint_check_sometimes);
|
||||
def(DerivedPointerTableGC_lock , Mutex, leaf, true, Monitor::_safepoint_check_never);
|
||||
|
@ -81,7 +81,8 @@ extern Monitor* DirtyCardQ_CBL_mon; // Protects dirty card Q
|
||||
extern Mutex* Shared_DirtyCardQ_lock; // Lock protecting dirty card
|
||||
// queue shared by
|
||||
// non-Java threads.
|
||||
// (see option ExplicitGCInvokesConcurrent)
|
||||
extern Mutex* MarkStackFreeList_lock; // Protects access to the global mark stack free list.
|
||||
extern Mutex* MarkStackChunkList_lock; // Protects access to the global mark stack chunk list.
|
||||
extern Mutex* ParGCRareEvent_lock; // Synchronizes various (rare) parallel GC ops.
|
||||
extern Mutex* Compile_lock; // a lock held when Compilation is updating code (used to block CodeCache traversal, CHA updates, etc)
|
||||
extern Monitor* MethodCompileQueue_lock; // a lock held when method compilations are enqueued, dequeued
|
||||
|
@ -1705,8 +1705,8 @@ bool os::release_memory(char* addr, size_t bytes) {
return res;
}

void os::pretouch_memory(void* start, void* end) {
for (volatile char *p = (char*)start; p < (char*)end; p += os::vm_page_size()) {
void os::pretouch_memory(void* start, void* end, size_t page_size) {
for (volatile char *p = (char*)start; p < (char*)end; p += page_size) {
*p = 0;
}
}

@ -324,7 +324,7 @@ class os: AllStatic {
// to make the OS back the memory range with actual memory.
// Current implementation may not touch the last page if unaligned addresses
// are passed.
static void pretouch_memory(void* start, void* end);
static void pretouch_memory(void* start, void* end, size_t page_size = vm_page_size());

enum ProtType { MEM_PROT_NONE, MEM_PROT_READ, MEM_PROT_RW, MEM_PROT_RWX };
static bool protect_memory(char* addr, size_t bytes, ProtType prot,
@ -282,6 +282,12 @@ void report_untested(const char* file, int line, const char* message) {
|
||||
}
|
||||
|
||||
void report_out_of_shared_space(SharedSpaceType shared_space) {
|
||||
if (shared_space == SharedOptional) {
|
||||
// The estimated shared_optional_space size is large enough
|
||||
// for all class bytes. It should not run out of space.
|
||||
ShouldNotReachHere();
|
||||
}
|
||||
|
||||
static const char* name[] = {
|
||||
"shared read only space",
|
||||
"shared read write space",
|
||||
|
@ -271,7 +271,8 @@ enum SharedSpaceType {
|
||||
SharedReadOnly,
|
||||
SharedReadWrite,
|
||||
SharedMiscData,
|
||||
SharedMiscCode
|
||||
SharedMiscCode,
|
||||
SharedOptional
|
||||
};
|
||||
|
||||
void report_out_of_shared_space(SharedSpaceType space_type);
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -79,8 +79,8 @@ template <MEMFLAGS F> inline BasicHashtableEntry<F>* BasicHashtable<F>::bucket(i
|
||||
|
||||
|
||||
template <MEMFLAGS F> inline void HashtableBucket<F>::set_entry(BasicHashtableEntry<F>* l) {
|
||||
// Warning: Preserve store ordering. The SystemDictionary is read
|
||||
// without locks. The new SystemDictionaryEntry must be
|
||||
// Warning: Preserve store ordering. The PackageEntryTable, ModuleEntryTable and
|
||||
// SystemDictionary are read without locks. The new entry must be
|
||||
// complete before other threads can be allowed to see it
|
||||
// via a store to _buckets[index].
|
||||
OrderAccess::release_store_ptr(&_entry, l);
|
||||
@ -88,8 +88,8 @@ template <MEMFLAGS F> inline void HashtableBucket<F>::set_entry(BasicHashtableEn
|
||||
|
||||
|
||||
template <MEMFLAGS F> inline BasicHashtableEntry<F>* HashtableBucket<F>::get_entry() const {
|
||||
// Warning: Preserve load ordering. The SystemDictionary is read
|
||||
// without locks. The new SystemDictionaryEntry must be
|
||||
// Warning: Preserve load ordering. The PackageEntryTable, ModuleEntryTable and
|
||||
// SystemDictionary are read without locks. The new entry must be
|
||||
// complete before other threads can be allowed to see it
|
||||
// via a store to _buckets[index].
|
||||
return (BasicHashtableEntry<F>*) OrderAccess::load_ptr_acquire(&_entry);
|
||||
|
@ -110,10 +110,6 @@ public class TestOptionsWithRanges {
excludeTestMaxRange("OldSize");
excludeTestMaxRange("ParallelGCThreads");

excludeTestMaxRange("CompilerThreadStackSize");
excludeTestMaxRange("ThreadStackSize");
excludeTestMaxRange("VMThreadStackSize");

/*
 * Remove parameters controlling the code cache. As these
 * parameters have implications on the physical memory
115
hotspot/test/runtime/SharedArchiveFile/CDSTestUtils.java
Normal file
@ -0,0 +1,115 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import jdk.test.lib.process.OutputAnalyzer;
|
||||
|
||||
|
||||
// This class contains common test utilities for CDS testing
|
||||
public class CDSTestUtils {
|
||||
|
||||
// check result of 'dump' operation
|
||||
public static void checkDump(OutputAnalyzer output, String... extraMatches)
|
||||
throws Exception {
|
||||
|
||||
output.shouldContain("Loading classes to share");
|
||||
output.shouldHaveExitValue(0);
|
||||
|
||||
for (String match : extraMatches) {
|
||||
output.shouldContain(match);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// check the output for indication that mapping of the archive failed
|
||||
public static boolean isUnableToMap(OutputAnalyzer output) {
|
||||
String outStr = output.getOutput();
|
||||
if ((output.getExitValue() == 1) && (
|
||||
outStr.contains("Unable to reserve shared space at required address") ||
|
||||
outStr.contains("Unable to map ReadOnly shared space at required address") ||
|
||||
outStr.contains("Unable to map ReadWrite shared space at required address") ||
|
||||
outStr.contains("Unable to map MiscData shared space at required address") ||
|
||||
outStr.contains("Unable to map MiscCode shared space at required address") ||
|
||||
outStr.contains("Unable to map shared string space at required address") ||
|
||||
outStr.contains("Could not allocate metaspace at a compatible address") ||
|
||||
outStr.contains("Unable to allocate shared string space: range is not within java heap") ))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
// check result of 'exec' operation, that is when JVM is run using the archive
|
||||
public static void checkExec(OutputAnalyzer output, String... extraMatches) throws Exception {
|
||||
if (isUnableToMap(output)) {
|
||||
System.out.println("Unable to map shared archive: test did not complete; assumed PASS");
|
||||
return;
|
||||
}
|
||||
output.shouldContain("sharing");
|
||||
output.shouldHaveExitValue(0);
|
||||
|
||||
for (String match : extraMatches) {
|
||||
output.shouldContain(match);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// get the file object for the test artifact
|
||||
private static File getTestArtifactFile(String prefix, String name) {
|
||||
File dir = new File(System.getProperty("test.classes", "."));
|
||||
return new File(dir, prefix + name);
|
||||
}
|
||||
|
||||
|
||||
// create file containing the specified class list
|
||||
public static File makeClassList(String testCaseName, String classes[])
|
||||
throws Exception {
|
||||
|
||||
File classList = getTestArtifactFile(testCaseName, "test.classlist");
|
||||
FileOutputStream fos = new FileOutputStream(classList);
|
||||
PrintStream ps = new PrintStream(fos);
|
||||
|
||||
addToClassList(ps, classes);
|
||||
|
||||
ps.close();
|
||||
fos.close();
|
||||
|
||||
return classList;
|
||||
}
|
||||
|
||||
|
||||
private static void addToClassList(PrintStream ps, String classes[])
|
||||
throws IOException
|
||||
{
|
||||
if (classes != null) {
|
||||
for (String s : classes) {
|
||||
ps.println(s);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
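A minimal sketch of how a CDS test typically combines the CDSTestUtils helpers above (the driver class and archive name here are hypothetical; the real pattern used by this change appears in TransformRelatedClasses.java further down):

import jdk.test.lib.process.OutputAnalyzer;
import jdk.test.lib.process.ProcessTools;

public class CDSTestUtilsSketch {
    public static void main(String[] args) throws Exception {
        // 1. Dump a shared archive and verify the dump log.
        ProcessBuilder dump = ProcessTools.createJavaProcessBuilder(true,
                "-XX:+UnlockDiagnosticVMOptions",
                "-XX:SharedArchiveFile=./Sketch.jsa",
                "-Xshare:dump");
        CDSTestUtils.checkDump(new OutputAnalyzer(dump.start()));

        // 2. Run with the archive; checkExec() treats a mapping failure
        //    (see isUnableToMap()) as an environmental pass, not an error.
        ProcessBuilder run = ProcessTools.createJavaProcessBuilder(true,
                "-XX:+UnlockDiagnosticVMOptions",
                "-XX:SharedArchiveFile=./Sketch.jsa",
                "-Xshare:on", "-version");
        CDSTestUtils.checkExec(new OutputAnalyzer(run.start()));
    }
}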
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -21,29 +21,17 @@
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @bug 6536295
|
||||
* @summary Tests JumboEnumSet encoding
|
||||
* @author Sergey Malenkov
|
||||
*/
|
||||
|
||||
import java.util.EnumSet;
|
||||
import java.util.Set;
|
||||
|
||||
public final class java_util_JumboEnumSet extends AbstractTest<Set<EnumPrivate>> {
|
||||
public class Implementor implements Interface {
|
||||
public static void main(String[] args) {
|
||||
new java_util_JumboEnumSet().test(true);
|
||||
System.out.println("Implementor: entering main()");
|
||||
test();
|
||||
}
|
||||
|
||||
protected Set<EnumPrivate> getObject() {
|
||||
return EnumSet.noneOf(EnumPrivate.class);
|
||||
}
|
||||
|
||||
protected Set<EnumPrivate> getAnotherObject() {
|
||||
Set<EnumPrivate> set = EnumSet.noneOf(EnumPrivate.class);
|
||||
set.add(EnumPrivate.A0);
|
||||
set.add(EnumPrivate.Z9);
|
||||
return set;
|
||||
public static void test() {
|
||||
// from interface
|
||||
(new Implementor()).printString();
|
||||
// from implementor
|
||||
System.out.println(TransformUtil.ChildCheckPattern +
|
||||
TransformUtil.BeforePattern);
|
||||
}
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -21,4 +21,11 @@
|
||||
* questions.
|
||||
*/
|
||||
|
||||
public enum EnumPublic {A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z}
|
||||
public interface Interface {
|
||||
public static final String stringToBeTransformed =
|
||||
TransformUtil.ParentCheckPattern + TransformUtil.BeforePattern;
|
||||
|
||||
default void printString() {
|
||||
System.out.println(stringToBeTransformed);
|
||||
}
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -21,29 +21,20 @@
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @bug 6536295
|
||||
* @summary Tests RegularEnumSet encoding
|
||||
* @author Sergey Malenkov
|
||||
*/
|
||||
|
||||
import java.util.EnumSet;
|
||||
import java.util.Set;
|
||||
|
||||
public final class java_util_RegularEnumSet extends AbstractTest<Set<EnumPublic>> {
|
||||
public class SubClass extends SuperClazz {
|
||||
public static void main(String[] args) {
|
||||
new java_util_RegularEnumSet().test(true);
|
||||
System.out.println("SubClass: entering main()");
|
||||
test();
|
||||
}
|
||||
|
||||
protected Set<EnumPublic> getObject() {
|
||||
return EnumSet.noneOf(EnumPublic.class);
|
||||
public static void test() {
|
||||
// The line below will be used to check for successful class transformation
|
||||
System.out.println(TransformUtil.ChildCheckPattern +
|
||||
TransformUtil.BeforePattern);
|
||||
(new SubClass()).callParent();
|
||||
}
|
||||
|
||||
protected Set<EnumPublic> getAnotherObject() {
|
||||
Set<EnumPublic> set = EnumSet.noneOf(EnumPublic.class);
|
||||
set.add(EnumPublic.A);
|
||||
set.add(EnumPublic.Z);
|
||||
return set;
|
||||
private void callParent() {
|
||||
super.testParent();
|
||||
}
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -21,28 +21,12 @@
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @bug 6505888
|
||||
* @summary Tests CheckedSet encoding
|
||||
* @author Sergey Malenkov
|
||||
*/
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
public class SuperClazz {
|
||||
public static void testParent() {
|
||||
System.out.println("SuperClazz: entering testParent()");
|
||||
|
||||
public final class java_util_Collections_CheckedSet extends AbstractTest<Set<String>> {
|
||||
public static void main(String[] args) {
|
||||
new java_util_Collections_CheckedSet().test(true);
|
||||
}
|
||||
|
||||
protected Set<String> getObject() {
|
||||
Set<String> set = Collections.singleton("string");
|
||||
return Collections.checkedSet(set, String.class);
|
||||
}
|
||||
|
||||
protected Set<String> getAnotherObject() {
|
||||
Set<String> set = Collections.emptySet();
|
||||
return Collections.checkedSet(set, String.class);
|
||||
// The line below will be used to check for successful class transformation
|
||||
System.out.println(TransformUtil.ParentCheckPattern + TransformUtil.BeforePattern);
|
||||
}
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -21,28 +21,25 @@
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @bug 6505888
|
||||
* @summary Tests CheckedList encoding
|
||||
* @author Sergey Malenkov
|
||||
*/
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
// Test Entry - a single entry in a test table
|
||||
// that defines a test case
|
||||
// See TransformRelatedClasses.java for more details
|
||||
public class TestEntry {
|
||||
int testCaseId;
|
||||
boolean transformParent;
|
||||
boolean transformChild;
|
||||
boolean isParentExpectedShared;
|
||||
boolean isChildExpectedShared;
|
||||
|
||||
public final class java_util_Collections_CheckedList extends AbstractTest<List<String>> {
|
||||
public static void main(String[] args) {
|
||||
new java_util_Collections_CheckedList().test(true);
|
||||
}
|
||||
|
||||
protected List<String> getObject() {
|
||||
List<String> list = Collections.singletonList("string");
|
||||
return Collections.checkedList(list, String.class);
|
||||
}
|
||||
|
||||
protected List<String> getAnotherObject() {
|
||||
List<String> list = Collections.emptyList();
|
||||
return Collections.checkedList(list, String.class);
|
||||
public TestEntry(int testCaseId,
|
||||
boolean transformParent, boolean transformChild,
|
||||
boolean isParentExpectedShared, boolean isChildExpectedShared) {
|
||||
this.testCaseId = testCaseId;
|
||||
this.transformParent = transformParent;
|
||||
this.transformChild = transformChild;
|
||||
this.isParentExpectedShared = isParentExpectedShared;
|
||||
this.isChildExpectedShared = isChildExpectedShared;
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @summary Exercise initial transformation (ClassFileLoadHook)
|
||||
* with CDS with Interface/Implementor pair
|
||||
* @library /test/lib /runtime/SharedArchiveFile /testlibrary/jvmti
|
||||
* @requires (vm.opt.UseCompressedOops == null) | (vm.opt.UseCompressedOops == true)
|
||||
* @requires vm.flavor != "minimal"
|
||||
* @modules java.base/jdk.internal.misc
|
||||
* jdk.jartool/sun.tools.jar
|
||||
* java.management
|
||||
* java.instrument
|
||||
* @build TransformUtil TransformerAgent Interface Implementor
|
||||
* @run main/othervm TransformRelatedClasses Interface Implementor
|
||||
*/
|
||||
|
||||
// Clarification on @requires declarations:
|
||||
// CDS is not supported w/o the use of Compressed OOPs
|
||||
// JVMTI's ClassFileLoadHook is not supported under minimal VM
|
||||
|
||||
// This test class uses TransformRelatedClasses to do its work.
|
||||
// The goal of this test is to exercise transformation of related interface
|
||||
// and its implementor in combination with CDS.
|
||||
// The transformation is done via ClassFileLoadHook mechanism.
|
||||
// Both superclass and subclass reside in the shared archive.
|
||||
// The test consists of 4 test cases where transformation is applied
|
||||
// to an interface and an implementor in a combinatorial manner.
|
||||
// Please see TransformRelatedClasses.java for details.
|
@ -0,0 +1,179 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
|
||||
// This is the main test class for testing transformation of related classes
|
||||
// in combination with CDS, to ensure these features work well together.
|
||||
// The relationships that can be tested using this test class are:
|
||||
// superclass/subclass, and interface/implementor relationships.
|
||||
//
|
||||
// The test uses a combinatorial approach.
|
||||
// For details on test table and test cases see main() method in this class.
|
||||
//
|
||||
// This test consists of multiple classes for better flexibility and reuse,
|
||||
// and also relies on certain common utility code.
|
||||
// Here are the details on the structure of the test
|
||||
//
|
||||
// Structure of the test:
|
||||
// TransformRelatedClasses -- common main test driver
|
||||
// The TransformRelatedClasses is invoked from test driver classes:
|
||||
// TransformInterfaceAndImplementor, TransformSuperAndSubClasses
|
||||
// It is responsible for preparing test artifacts (test jar, agent jar
|
||||
// and the shared archive), running test cases and checking the results.
|
||||
// The following test classes below are launched in a sub-process with use
|
||||
// of shared archive:
|
||||
// SuperClazz, SubClass -- super/sub class pair under test
|
||||
// Interface, Implementor -- classes under test
|
||||
// This test will transform these classes, based on the test case data,
|
||||
// by changing a predefined unique string in each class.
|
||||
// For more details, see the test classes' code and comments.
|
||||
//
|
||||
// Other related classes:
|
||||
// TestEntry - a class representing a single test case, as test entry in the table
|
||||
// TransformTestCommon - common methods for transformation test cases
|
||||
//
|
||||
// Other utility/helper classes and files used in this test:
|
||||
// TransformerAgent - an agent that is used when JVM-under-test is executed
|
||||
// to transform specific strings inside specified classes
|
||||
// TransformerAgent.mf - accompanies transformer agent
|
||||
// CDSTestUtils - Test Utilities common to all CDS tests
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import jdk.test.lib.process.OutputAnalyzer;
|
||||
import jdk.test.lib.process.ProcessTools;
|
||||
|
||||
|
||||
public class TransformRelatedClasses {
|
||||
static final String archiveName = "./TransformRelatedClasses.jsa";
|
||||
static String agentClasses[] = {
|
||||
"TransformerAgent",
|
||||
"TransformerAgent$SimpleTransformer",
|
||||
"TransformUtil"
|
||||
};
|
||||
|
||||
String parent;
|
||||
String child;
|
||||
String[] testClasses = new String[2];
|
||||
String[] testNames = new String[2];
|
||||
String testJar;
|
||||
String agentJar;
|
||||
|
||||
|
||||
private static void log(String msg) {
|
||||
System.out.println("TransformRelatedClasses: " + msg);
|
||||
}
|
||||
|
||||
|
||||
// This class is intended to test 2 parent-child relationships:
|
||||
// 1. Base Class (parent) and Derived Class (child)
|
||||
// 2. Interface (parent) and Implementor (child)
|
||||
// Parameters to main(): parent, child
|
||||
public static void main(String args[]) throws Exception {
|
||||
TransformRelatedClasses test = new TransformRelatedClasses(args[0], args[1]);
|
||||
test.prepare();
|
||||
|
||||
// Test Table
|
||||
// TestEntry: (testCaseId, transformParent, transformChild,
|
||||
// isParentExpectedShared, isChildExpectedShared)
|
||||
ArrayList<TestEntry> testTable = new ArrayList<>();
|
||||
|
||||
// base case - no transformation - all expected to be shared
|
||||
testTable.add(new TestEntry(0, false, false, true, true));
|
||||
|
||||
// transform parent only - both parent and child should not be shared
|
||||
testTable.add(new TestEntry(1, true, false, false, false));
|
||||
|
||||
// transform parent and child - both parent and child should not be shared
|
||||
testTable.add(new TestEntry(2, true, true, false, false));
|
||||
|
||||
// transform child only - parent should still be shared, but not child
|
||||
testTable.add(new TestEntry(3, false, true, true, false));
|
||||
|
||||
// run the tests
|
||||
for (TestEntry entry : testTable) {
|
||||
test.runTest(entry);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public TransformRelatedClasses(String parent, String child) {
|
||||
log("Constructor: parent = " + parent + ", child = " + child);
|
||||
this.parent = parent;
|
||||
this.child = child;
|
||||
testClasses[0] = parent;
|
||||
testClasses[1] = child;
|
||||
testNames[0] = parent.replace('.', '/');
|
||||
testNames[1] = child.replace('.', '/');
|
||||
}
|
||||
|
||||
|
||||
// same test jar and archive can be used for all test cases
|
||||
private void prepare() throws Exception {
|
||||
// create agent jar
|
||||
// Agent is the same for all test cases
|
||||
String pathToManifest = "../../../../testlibrary/jvmti/TransformerAgent.mf";
|
||||
agentJar = ClassFileInstaller.writeJar("TransformerAgent.jar",
|
||||
ClassFileInstaller.Manifest.fromSourceFile(pathToManifest),
|
||||
agentClasses);
|
||||
|
||||
// create a test jar
|
||||
testJar =
|
||||
ClassFileInstaller.writeJar(parent + "-" + child + ".jar",
|
||||
testClasses);
|
||||
|
||||
// create an archive
|
||||
File classList = CDSTestUtils.makeClassList("transform-" + parent,
|
||||
testNames);
|
||||
ProcessBuilder pb = ProcessTools.createJavaProcessBuilder(true,
|
||||
"-Xbootclasspath/a:" + testJar,
|
||||
"-XX:+UnlockDiagnosticVMOptions",
|
||||
"-XX:ExtraSharedClassListFile=" +
|
||||
classList.getPath(),
|
||||
"-XX:SharedArchiveFile=" + archiveName,
|
||||
"-XX:+PrintSharedSpaces",
|
||||
"-Xshare:dump");
|
||||
OutputAnalyzer out = new OutputAnalyzer(pb.start());
|
||||
CDSTestUtils.checkDump(out);
|
||||
}
|
||||
|
||||
|
||||
private void runTest(TestEntry entry) throws Exception {
|
||||
log("runTest(): testCaseId = " + entry.testCaseId);
|
||||
|
||||
// execute with archive
|
||||
String agentParam = "-javaagent:" + agentJar + "=" +
|
||||
TransformTestCommon.getAgentParams(entry, parent, child);
|
||||
|
||||
ProcessBuilder pb = ProcessTools.createJavaProcessBuilder(true,
|
||||
"-Xbootclasspath/a:" + testJar,
|
||||
"-XX:+UnlockDiagnosticVMOptions",
|
||||
"-XX:SharedArchiveFile=" + archiveName,
|
||||
"-Xlog:class+load=info",
|
||||
"-Xshare:on", "-showversion",
|
||||
agentParam, child);
|
||||
OutputAnalyzer out = new OutputAnalyzer(pb.start());
|
||||
|
||||
TransformTestCommon.checkResults(entry, out, parent, child);
|
||||
}
|
||||
}
|
@ -0,0 +1,51 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @summary Exercise initial transformation (ClassFileLoadHook)
|
||||
* with CDS with SubClass and SuperClass
|
||||
* @library /test/lib /runtime/SharedArchiveFile /testlibrary/jvmti
|
||||
* @requires (vm.opt.UseCompressedOops == null) | (vm.opt.UseCompressedOops == true)
|
||||
* @requires vm.flavor != "minimal"
|
||||
* @modules java.base/jdk.internal.misc
|
||||
* jdk.jartool/sun.tools.jar
|
||||
* java.management
|
||||
* java.instrument
|
||||
* @build TransformUtil TransformerAgent SubClass SuperClazz
|
||||
* @run main/othervm TransformRelatedClasses SuperClazz SubClass
|
||||
*/
|
||||
|
||||
// Clarification on @requires declarations:
|
||||
// CDS is not supported w/o the use of Compressed OOPs
|
||||
// JVMTI's ClassFileLoadHook is not supported under minimal VM
|
||||
|
||||
// This test class uses TransformRelatedClasses to do its work.
|
||||
// The goal of this test is to exercise transformation of related superclass
|
||||
// and subclass in combination with CDS.
|
||||
// The transformation is done via ClassFileLoadHook mechanism.
|
||||
// Both superclass and subclass reside in the shared archive.
|
||||
// The test consists of 4 test cases where transformation is applied
|
||||
// to a parent and child in combinatorial manner.
|
||||
// Please see TransformRelatedClasses.java for details.
|
@ -0,0 +1,52 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @summary Exercise initial transformation (ClassFileLoadHook)
|
||||
* with CDS with SubClass and SuperClass, each living in its own package
|
||||
* @library /test/lib /runtime/SharedArchiveFile /testlibrary/jvmti
|
||||
* @requires (vm.opt.UseCompressedOops == null) | (vm.opt.UseCompressedOops == true)
|
||||
* @requires vm.flavor != "minimal"
|
||||
* @modules java.base/jdk.internal.misc
|
||||
* jdk.jartool/sun.tools.jar
|
||||
* java.management
|
||||
* java.instrument
|
||||
* @build TransformUtil TransformerAgent SubClass SuperClazz
|
||||
* @compile myPkg2/SubClass.java myPkg1/SuperClazz.java
|
||||
* @run main/othervm TransformRelatedClasses myPkg1.SuperClazz myPkg2.SubClass
|
||||
*/
|
||||
|
||||
// Clarification on @requires declarations:
|
||||
// CDS is not supported w/o the use of Compressed OOPs
|
||||
// JVMTI's ClassFileLoadHook is not supported under minimal VM
|
||||
|
||||
// This test class uses TransformRelatedClasses to do its work.
|
||||
// The goal of this test is to exercise transformation of related superclass
|
||||
// and subclass in combination with CDS; each class lives in its own package.
|
||||
// The transformation is done via ClassFileLoadHook mechanism.
|
||||
// Both superclass and subclass reside in the shared archive.
|
||||
// The test consists of 4 test cases where transformation is applied
|
||||
// to a parent and child in combinatorial manner.
|
||||
// Please see TransformRelatedClasses.java for details.
|
@ -0,0 +1,114 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
import jdk.test.lib.process.OutputAnalyzer;
|
||||
|
||||
|
||||
// This class contains methods common to all transformation test cases
|
||||
public class TransformTestCommon {
|
||||
|
||||
// get parameters to an agent depending on the test case
|
||||
// these parameters will instruct the agent which classes should be
|
||||
// transformed
|
||||
public static String getAgentParams(TestEntry entry,
|
||||
String parent, String child) {
|
||||
|
||||
if (entry.transformParent && entry.transformChild)
|
||||
return parent + "," + child;
|
||||
if (entry.transformParent)
|
||||
return parent;
|
||||
if (entry.transformChild)
|
||||
return child;
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
|
||||
private static void checkTransformationResults(TestEntry entry,
|
||||
OutputAnalyzer out)
|
||||
throws Exception {
|
||||
|
||||
if (entry.transformParent)
|
||||
out.shouldContain(TransformUtil.ParentCheckPattern +
|
||||
TransformUtil.AfterPattern);
|
||||
|
||||
if (entry.transformChild)
|
||||
out.shouldContain(TransformUtil.ChildCheckPattern +
|
||||
TransformUtil.AfterPattern);
|
||||
}
|
||||
|
||||
|
||||
private static void checkSharingByClass(TestEntry entry, OutputAnalyzer out,
|
||||
String parent, String child)
|
||||
throws Exception {
|
||||
|
||||
String parentSharedMatch = parent + " source: shared objects file";
|
||||
String childSharedMatch = child + " source: shared objects file";
|
||||
|
||||
if (entry.isParentExpectedShared)
|
||||
out.shouldContain(parentSharedMatch);
|
||||
else
|
||||
out.shouldNotContain(parentSharedMatch);
|
||||
|
||||
if (entry.isChildExpectedShared)
|
||||
out.shouldContain(childSharedMatch);
|
||||
else
|
||||
out.shouldNotContain(childSharedMatch);
|
||||
}
|
||||
|
||||
|
||||
// Both parent and child classes should be passed to ClassFileTransformer.transform()
|
||||
// exactly once.
|
||||
private static void checkTransformationCounts(TestEntry entry, OutputAnalyzer out,
|
||||
String parent, String child)
|
||||
throws Exception {
|
||||
|
||||
String patternBase = "TransformerAgent: SimpleTransformer called for: ";
|
||||
|
||||
out.shouldContain(patternBase + child + "@1");
|
||||
out.shouldContain(patternBase + parent + "@1");
|
||||
|
||||
out.shouldNotContain(patternBase + child + "@2");
|
||||
out.shouldNotContain(patternBase + parent + "@2");
|
||||
}
|
||||
|
||||
|
||||
public static void checkResults(TestEntry entry, OutputAnalyzer out,
|
||||
String parent, String child)
|
||||
throws Exception {
|
||||
|
||||
// If we were not able to map an archive,
|
||||
// then do not perform other checks, since
|
||||
// there was no sharing at all
|
||||
if (CDSTestUtils.isUnableToMap(out))
|
||||
return;
|
||||
|
||||
String childVmName = child.replace('.', '/');
|
||||
String parentVmName = parent.replace('.', '/');
|
||||
|
||||
CDSTestUtils.checkExec(out);
|
||||
checkTransformationCounts(entry, out, parentVmName, childVmName);
|
||||
checkTransformationResults(entry, out);
|
||||
checkSharingByClass(entry, out, parent, child);
|
||||
}
|
||||
}
|
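For illustration, a hedged sketch of how a single row of the test table flows through these helpers (the SuperClazz/SubClass names mirror the classes used elsewhere in this change; the surrounding driver class is hypothetical):

public class AgentParamsSketch {
    public static void main(String[] args) {
        // Row 1 of the test table: transform only the parent class.
        TestEntry entry = new TestEntry(1, /*transformParent=*/true, /*transformChild=*/false,
                                        /*isParentExpectedShared=*/false, /*isChildExpectedShared=*/false);

        // getAgentParams() turns the row into the agent argument string,
        // here just "SuperClazz", so the JVM under test would be started with
        // -javaagent:TransformerAgent.jar=SuperClazz
        String agentParams = TransformTestCommon.getAgentParams(entry, "SuperClazz", "SubClass");
        System.out.println("agent params = " + agentParams);
    }
}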
@ -0,0 +1,33 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
package myPkg1;
|
||||
|
||||
public class SuperClazz {
|
||||
public static void testParent() {
|
||||
System.out.println("SuperClazz: entering testParent()");
|
||||
|
||||
// The line below will be used to check for successful class transformation
|
||||
System.out.println("parent-transform-check: this-should-be-transformed");
|
||||
}
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -21,30 +21,36 @@
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @bug 6505888
|
||||
* @summary Tests CheckedSortedMap encoding
|
||||
* @author Sergey Malenkov
|
||||
*/
|
||||
package myPkg2;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.SortedMap;
|
||||
import java.util.TreeMap;
|
||||
import myPkg1.SuperClazz;
|
||||
|
||||
public final class java_util_Collections_CheckedSortedMap extends AbstractTest<SortedMap<String, String>> {
|
||||
public class SubClass extends SuperClazz {
|
||||
public static void main(String[] args) {
|
||||
new java_util_Collections_CheckedSortedMap().test(true);
|
||||
System.out.println("SubClass: entering main()");
|
||||
test();
|
||||
}
|
||||
|
||||
protected SortedMap<String, String> getObject() {
|
||||
SortedMap<String, String> map = new TreeMap<String, String>();
|
||||
map.put("key", "value");
|
||||
return Collections.checkedSortedMap(map, String.class, String.class);
|
||||
public static void test() {
|
||||
// The line below will be used to check for successful class transformation
|
||||
System.out.println("child-transform-check: this-should-be-transformed");
|
||||
(new SubClass()).callParent();
|
||||
|
||||
// Get the system packages, which should contain myPkg1 and myPkg2
|
||||
Package[] pkgs = Package.getPackages();
|
||||
for (int i = 0; i < pkgs.length; i++) {
|
||||
if (pkgs[i].getName().equals("myPkg1")) {
|
||||
for (int j = 0; j < pkgs.length; j++) {
|
||||
if (pkgs[j].getName().equals("myPkg2")) {
|
||||
return; // found myPkg1 & myPkg2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new RuntimeException("Missing system package");
|
||||
}
|
||||
|
||||
protected SortedMap<String, String> getAnotherObject() {
|
||||
SortedMap<String, String> map = new TreeMap<String, String>();
|
||||
return Collections.checkedSortedMap(map, String.class, String.class);
|
||||
private void callParent() {
|
||||
super.testParent();
|
||||
}
|
||||
}
|
75
hotspot/test/testlibrary/jvmti/TransformUtil.java
Normal file
@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
|
||||
public class TransformUtil {
|
||||
public static final String BeforePattern = "this-should-be-transformed";
|
||||
public static final String AfterPattern = "this-has-been--transformed";
|
||||
public static final String ParentCheckPattern = "parent-transform-check: ";
|
||||
public static final String ChildCheckPattern = "child-transform-check: ";
|
||||
|
||||
/**
|
||||
* @return the number of occurrences of the <code>from</code> string that
|
||||
* have been replaced.
|
||||
*/
|
||||
public static int replace(byte buff[], String from, String to) {
|
||||
if (to.length() != from.length()) {
|
||||
throw new RuntimeException("bad strings");
|
||||
}
|
||||
byte f[] = asciibytes(from);
|
||||
byte t[] = asciibytes(to);
|
||||
byte f0 = f[0];
|
||||
|
||||
int numReplaced = 0;
|
||||
int max = buff.length - f.length;
|
||||
for (int i = 0; i < max; ) {
|
||||
if (buff[i] == f0 && replace(buff, f, t, i)) {
|
||||
i += f.length;
|
||||
numReplaced++;
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
return numReplaced;
|
||||
}
|
||||
|
||||
public static boolean replace(byte buff[], byte f[], byte t[], int i) {
|
||||
for (int x = 0; x < f.length; x++) {
|
||||
if (buff[x+i] != f[x]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
for (int x = 0; x < f.length; x++) {
|
||||
buff[x+i] = t[x];
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static byte[] asciibytes(String s) {
|
||||
byte b[] = new byte[s.length()];
|
||||
for (int i = 0; i < b.length; i++) {
|
||||
b[i] = (byte)s.charAt(i);
|
||||
}
|
||||
return b;
|
||||
}
|
||||
}
|
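A self-contained sketch of the in-place replacement TransformUtil performs (the buffer contents below are made up; note that replace() requires both patterns to have the same length, which is why AfterPattern carries a double dash):

public class TransformUtilSketch {
    public static void main(String[] args) {
        // Simulate a class file buffer containing the marker string once.
        byte[] buffer = ("prefix " + TransformUtil.BeforePattern + " suffix").getBytes();

        int replaced = TransformUtil.replace(buffer,
                                             TransformUtil.BeforePattern,
                                             TransformUtil.AfterPattern);

        // Prints: replaced=1, and the buffer now contains "this-has-been--transformed".
        System.out.println("replaced=" + replaced + ", buffer=" + new String(buffer));
    }
}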
100
hotspot/test/testlibrary/jvmti/TransformerAgent.java
Normal file
@ -0,0 +1,100 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
import java.lang.instrument.ClassFileTransformer;
|
||||
import java.lang.instrument.IllegalClassFormatException;
|
||||
import java.lang.instrument.Instrumentation;
|
||||
import java.security.ProtectionDomain;
|
||||
import java.util.HashMap;
|
||||
|
||||
// This is a test utility class used to transform
|
||||
// specified classes via initial transformation (ClassFileLoadHook).
|
||||
// Names of classes to be transformed are supplied as arguments,
|
||||
// the phrase to be transformed is a hard-coded predefined
|
||||
// fairly unique phrase.
|
||||
|
||||
public class TransformerAgent {
|
||||
private static String[] classesToTransform;
|
||||
|
||||
|
||||
private static void log(String msg) {
|
||||
System.out.println("TransformerAgent: " + msg);
|
||||
}
|
||||
|
||||
|
||||
// arguments are comma-separated list of classes to transform
|
||||
public static void premain(String agentArguments, Instrumentation instrumentation) {
|
||||
log("premain() is called, arguments = " + agentArguments);
|
||||
classesToTransform = agentArguments.split(",");
|
||||
instrumentation.addTransformer(new SimpleTransformer(), /*canRetransform=*/true);
|
||||
}
|
||||
|
||||
|
||||
public static void agentmain(String args, Instrumentation inst) throws Exception {
|
||||
log("agentmain() is called");
|
||||
premain(args, inst);
|
||||
}
|
||||
|
||||
|
||||
static class SimpleTransformer implements ClassFileTransformer {
|
||||
public byte[] transform(ClassLoader loader, String name, Class<?> classBeingRedefined,
|
||||
ProtectionDomain pd, byte[] buffer) throws IllegalClassFormatException {
|
||||
|
||||
log("SimpleTransformer called for: " + name + "@" + incrCounter(name));
|
||||
if (!shouldTransform(name))
|
||||
return null;
|
||||
|
||||
log("transforming: class name = " + name);
|
||||
int nrOfReplacements = TransformUtil.replace(buffer, TransformUtil.BeforePattern,
|
||||
TransformUtil.AfterPattern);
|
||||
log("replaced the string, nrOfReplacements = " + nrOfReplacements);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
// Check class name pattern, since test should only transform certain classes
|
||||
private static boolean shouldTransform(String name) {
|
||||
for (String match : classesToTransform) {
|
||||
if (name.matches(match)) {
|
||||
log("shouldTransform: match-found, match = " + match);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static HashMap<String, Integer> counterMap = new HashMap<>();
|
||||
|
||||
static Integer incrCounter(String className) {
|
||||
Integer i = counterMap.get(className);
|
||||
if (i == null) {
|
||||
i = new Integer(1);
|
||||
} else {
|
||||
i = new Integer(i.intValue() + 1);
|
||||
}
|
||||
counterMap.put(className, i);
|
||||
return i;
|
||||
}
|
||||
}
|
5
hotspot/test/testlibrary/jvmti/TransformerAgent.mf
Normal file
@ -0,0 +1,5 @@
Manifest-Version: 1.0
Premain-Class: TransformerAgent
Agent-Class: TransformerAgent
Can-Retransform-Classes: true
Can-Redefine-Classes: false
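The manifest above is what lets the jar produced by ClassFileInstaller.writeJar() be attached as a -javaagent. A hedged sketch of the resulting launch, with hypothetical jar and class names, mirroring what TransformRelatedClasses.runTest() does:

import jdk.test.lib.process.OutputAnalyzer;
import jdk.test.lib.process.ProcessTools;

public class AgentLaunchSketch {
    public static void main(String[] args) throws Exception {
        String agentJar = "TransformerAgent.jar";            // built from the agent classes above
        String classesToTransform = "SuperClazz,SubClass";   // comma-separated, parsed by premain()

        ProcessBuilder pb = ProcessTools.createJavaProcessBuilder(true,
                "-javaagent:" + agentJar + "=" + classesToTransform,
                "SubClass");
        new OutputAnalyzer(pb.start()).shouldHaveExitValue(0);
    }
}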
@ -380,3 +380,4 @@ e66cdc2de6b02443911d386fc9217b0d824d0686 jdk-9+130
|
||||
f695240370c77a25fed88225a392e7d530cb4d78 jdk-9+135
|
||||
f1eafcb0eb7182b937bc93f214d8cabd01ec4d59 jdk-9+136
|
||||
a8d5fe567ae72b4931040e59dd4478363f9004f5 jdk-9+137
|
||||
69c3b12ba75b2e321dee731ac545e7fbff608451 jdk-9+138
|
||||
|
@ -383,3 +383,4 @@ ab1d78d395d4cb8be426ff181211da1a4085cf01 jdk-9+134
|
||||
22631824f55128a7ab6605493b3001a37af6a168 jdk-9+135
|
||||
09ec13a99f50a4a346180d1e3b0fd8bc1ee399ce jdk-9+136
|
||||
297c16d401c534cb879809d2a746d21ca99d2954 jdk-9+137
|
||||
7d3a8f52b124db26ba8425c2931b748dd9d2791b jdk-9+138
|
||||
|
@ -380,3 +380,4 @@ d5c70818cd8a82e76632c8c815bdb4f75f53aeaf jdk-9+132
|
||||
021369229cfd0b5feb76834b2ea498f47f43c0f3 jdk-9+135
|
||||
54c5931849a33a363e03fdffa141503f5cc4779d jdk-9+136
|
||||
e72df94364e3686e7d62059ce0d6b187b82da713 jdk-9+137
|
||||
665096863382bf23ce891307cf2a7511e77c1c88 jdk-9+138
|
||||
|
42
jdk/make/CompileModuleTools.gmk
Normal file
@ -0,0 +1,42 @@
|
||||
#
|
||||
# Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
#
|
||||
# This code is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License version 2 only, as
|
||||
# published by the Free Software Foundation. Oracle designates this
|
||||
# particular file as subject to the "Classpath" exception as provided
|
||||
# by Oracle in the LICENSE file that accompanied this code.
|
||||
#
|
||||
# This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# version 2 for more details (a copy is included in the LICENSE file that
|
||||
# accompanied this code).
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License version
|
||||
# 2 along with this work; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
# or visit www.oracle.com if you need additional information or have any
|
||||
# questions.
|
||||
#
|
||||
|
||||
include $(SPEC)
|
||||
include MakeBase.gmk
|
||||
include JavaCompilation.gmk
|
||||
include SetupJavaCompilers.gmk
|
||||
|
||||
TOOLS_CLASSES_DIR := $(BUILDTOOLS_OUTPUTDIR)/tools_jigsaw_classes
|
||||
|
||||
$(eval $(call SetupJavaCompilation,BUILD_JIGSAW_TOOLS, \
|
||||
SETUP := GENERATE_USINGJDKBYTECODE, \
|
||||
SRC := $(JDK_TOPDIR)/make/src/classes, \
|
||||
INCLUDES := build/tools/deps \
|
||||
build/tools/jigsaw, \
|
||||
BIN := $(TOOLS_CLASSES_DIR), \
|
||||
ADD_JAVAC_FLAGS := \
|
||||
--add-exports jdk.jdeps/com.sun.tools.classfile=ALL-UNNAMED \
|
||||
--add-exports java.base/jdk.internal.module=ALL-UNNAMED \
|
||||
))
|
@ -1,5 +1,5 @@
|
||||
#
|
||||
# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
|
||||
# Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
#
|
||||
# This code is free software; you can redistribute it and/or modify it
|
||||
@ -26,18 +26,14 @@
|
||||
include $(SPEC)
|
||||
include MakeBase.gmk
|
||||
include JavaCompilation.gmk
|
||||
include SetupJavaCompilers.gmk
|
||||
|
||||
TOOLS_CLASSES_DIR := $(BUILDTOOLS_OUTPUTDIR)/tools_jigsaw_classes
|
||||
|
||||
$(eval $(call SetupJavaCompilation,BUILD_JIGSAW_TOOLS, \
|
||||
SETUP := GENERATE_USINGJDKBYTECODE, \
|
||||
SRC := $(JDK_TOPDIR)/make/src/classes, \
|
||||
INCLUDES := build/tools/deps \
|
||||
build/tools/jigsaw, \
|
||||
BIN := $(TOOLS_CLASSES_DIR), \
|
||||
ADD_JAVAC_FLAGS := --add-exports jdk.jdeps/com.sun.tools.classfile=ALL-UNNAMED ))
|
||||
|
||||
# To avoid reevaluating the compilation setup for the tools each time this file
|
||||
# is included, the actual compilation is handled by CompileModuleTools.gmk. The
|
||||
# following trick is used to be able to declare a dependency on the built tools.
|
||||
BUILD_TOOLS_JDK := $(call SetupJavaCompilationCompileTarget, \
|
||||
BUILD_JIGSAW_TOOLS, $(TOOLS_CLASSES_DIR))
|
||||
|
||||
TOOL_GENGRAPHS := $(BUILD_JAVA) -esa -ea -cp $(TOOLS_CLASSES_DIR) \
|
||||
build.tools.jigsaw.GenGraphs
|
||||
@ -45,3 +41,8 @@ TOOL_GENGRAPHS := $(BUILD_JAVA) -esa -ea -cp $(TOOLS_CLASSES_DIR) \
|
||||
TOOL_MODULESUMMARY := $(BUILD_JAVA) -esa -ea -cp $(TOOLS_CLASSES_DIR) \
|
||||
--add-exports jdk.jdeps/com.sun.tools.classfile=ALL-UNNAMED \
|
||||
build.tools.jigsaw.ModuleSummary
|
||||
|
||||
TOOL_ADD_PACKAGES_ATTRIBUTE := $(BUILD_JAVA) $(JAVA_FLAGS_SMALL) \
|
||||
-cp $(TOOLS_CLASSES_DIR) \
|
||||
--add-exports java.base/jdk.internal.module=ALL-UNNAMED \
|
||||
build.tools.jigsaw.AddPackagesAttribute
|
||||
|
@ -236,10 +236,6 @@ TARGETS += $(BUILD_LIBZIP)
|
||||
|
||||
##########################################################################################
|
||||
|
||||
ifeq ($(OPENJDK_TARGET_OS), aix)
|
||||
LIBJIMAGE_TOOLCHAIN := TOOLCHAIN_LINK_CXX
|
||||
endif # OPENJDK_TARGET_OS aix
|
||||
|
||||
JIMAGELIB_CPPFLAGS := \
|
||||
-I$(JDK_TOPDIR)/src/java.base/share/native/libjava \
|
||||
-I$(JDK_TOPDIR)/src/java.base/$(OPENJDK_TARGET_OS_TYPE)/native/libjava \
|
||||
@ -249,7 +245,7 @@ JIMAGELIB_CPPFLAGS := \
|
||||
|
||||
$(eval $(call SetupNativeCompilation,BUILD_LIBJIMAGE, \
|
||||
LIBRARY := jimage, \
|
||||
TOOLCHAIN := $(LIBJIMAGE_TOOLCHAIN), \
|
||||
TOOLCHAIN := TOOLCHAIN_LINK_CXX, \
|
||||
OUTPUT_DIR := $(INSTALL_LIBRARIES_HERE), \
|
||||
OPTIMIZATION := LOW, \
|
||||
SRC := $(JDK_TOPDIR)/src/java.base/share/native/libjimage \
|
||||
|
@ -53,12 +53,7 @@ ifeq ($(OPENJDK_TARGET_OS), solaris)
|
||||
endif
|
||||
|
||||
ifeq ($(OPENJDK_TARGET_OS), aix)
|
||||
BUILD_LIBNIO_MAPFILE:=$(JDK_TOPDIR)/make/mapfiles/libnio/mapfile-$(OPENJDK_TARGET_OS)
|
||||
BUILD_LIBNIO_EXFILES += \
|
||||
/NativeThread.c
|
||||
# Notice: we really need the leading slash here because otherwise every
|
||||
# FILE_NAME in EXCLUDE_FILES will actually match any file ending in FILE_NAME
|
||||
# (e.g. 'NativeThread.c' will also exclude 'AixNativeThread.c').
|
||||
BUILD_LIBNIO_MAPFILE := $(JDK_TOPDIR)/make/mapfiles/libnio/mapfile-$(OPENJDK_TARGET_OS)
|
||||
endif
|
||||
|
||||
$(eval $(call SetupNativeCompilation,BUILD_LIBNIO, \
|
||||
|
@ -37,7 +37,7 @@ else
|
||||
RMIC_MAIN_CLASS := sun.rmi.rmic.Main
|
||||
endif
|
||||
|
||||
RMIC := $(JAVA) $(INTERIM_OVERRIDE_MODULES_ARGS) $(RMIC_MAIN_CLASS)
|
||||
RMIC := $(JAVA_SMALL) $(INTERIM_OVERRIDE_MODULES_ARGS) $(RMIC_MAIN_CLASS)
|
||||
|
||||
CLASSES_DIR := $(JDK_OUTPUTDIR)/modules
|
||||
# NOTE: If the smart javac dependency management is reintroduced, these classes risk
|
||||
|
@ -0,0 +1,89 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation. Oracle designates this
|
||||
* particular file as subject to the "Classpath" exception as provided
|
||||
* by Oracle in the LICENSE file that accompanied this code.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
package build.tools.jigsaw;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.lang.module.ModuleFinder;
|
||||
import java.lang.module.ModuleReference;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
import jdk.internal.module.ModuleInfoExtender;
|
||||
|
||||
/**
|
||||
* Adds the Packages class file attribute to each module-info.class in an
|
||||
* exploded build.
|
||||
*/
|
||||
|
||||
public class AddPackagesAttribute {
|
||||
|
||||
public static void main(String[] args) throws IOException {
|
||||
|
||||
if (args.length != 1) {
|
||||
System.err.println("Usage AddPackagesAttribute exploded-java-home");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
String home = args[0];
|
||||
Path dir = Paths.get(home, "modules");
|
||||
|
||||
ModuleFinder finder = ModuleFinder.of(dir);
|
||||
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
|
||||
for (Path entry : stream) {
|
||||
Path mi = entry.resolve("module-info.class");
|
||||
if (Files.isRegularFile(mi)) {
|
||||
String mn = entry.getFileName().toString();
|
||||
Optional<ModuleReference> omref = finder.find(mn);
|
||||
if (omref.isPresent()) {
|
||||
Set<String> packages = omref.get().descriptor().conceals();
|
||||
addPackagesAttribute(mi, packages);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void addPackagesAttribute(Path mi, Set<String> packages) throws IOException {
|
||||
byte[] bytes;
|
||||
try (InputStream in = Files.newInputStream(mi)) {
|
||||
ModuleInfoExtender extender = ModuleInfoExtender.newExtender(in);
|
||||
extender.conceals(packages);
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
extender.write(baos);
|
||||
bytes = baos.toByteArray();
|
||||
}
|
||||
|
||||
Files.write(mi, bytes);
|
||||
}
|
||||
|
||||
}
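For orientation: this tool is driven by the TOOL_ADD_PACKAGES_ATTRIBUTE recipe added to make/CompileTools.gmk in this changeset, which boils down to an invocation of roughly the following shape (the exploded-image argument is a placeholder, not a value from this change):

    $(BUILD_JAVA) $(JAVA_FLAGS_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/tools_jigsaw_classes \
        --add-exports java.base/jdk.internal.module=ALL-UNNAMED \
        build.tools.jigsaw.AddPackagesAttribute <exploded-java-home>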
|
@ -1,76 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2002, 2014, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation. Oracle designates this
|
||||
* particular file as subject to the "Classpath" exception as provided
|
||||
* by Oracle in the LICENSE file that accompanied this code.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <string.h>
|
||||
#include "jni.h"
|
||||
#include "jni_util.h"
|
||||
#include "jvm.h"
|
||||
#include "jlong.h"
|
||||
#include "sun_nio_ch_NativeThread.h"
|
||||
|
||||
#include <pthread.h>
|
||||
#include <sys/signal.h>
|
||||
|
||||
/* Also defined in src/aix/native/java/net/aix_close.c */
|
||||
#define INTERRUPT_SIGNAL (SIGRTMAX - 1)
|
||||
|
||||
static void
|
||||
nullHandler(int sig)
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
JNIEXPORT void JNICALL
|
||||
Java_sun_nio_ch_NativeThread_init(JNIEnv *env, jclass cl)
|
||||
{
|
||||
/* Install the null handler for INTERRUPT_SIGNAL. This might overwrite the
|
||||
* handler previously installed by java/net/aix_close.c, but that's okay
|
||||
* since neither handler actually does anything. We install our own
|
||||
* handler here simply out of paranoia; ultimately the two mechanisms
|
||||
* should somehow be unified, perhaps within the VM.
|
||||
*/
|
||||
|
||||
sigset_t ss;
|
||||
struct sigaction sa, osa;
|
||||
sa.sa_handler = nullHandler;
|
||||
sa.sa_flags = 0;
|
||||
sigemptyset(&sa.sa_mask);
|
||||
if (sigaction(INTERRUPT_SIGNAL, &sa, &osa) < 0)
|
||||
JNU_ThrowIOExceptionWithLastError(env, "sigaction");
|
||||
}
|
||||
|
||||
JNIEXPORT jlong JNICALL
|
||||
Java_sun_nio_ch_NativeThread_current(JNIEnv *env, jclass cl)
|
||||
{
|
||||
return (long)pthread_self();
|
||||
}
|
||||
|
||||
JNIEXPORT void JNICALL
|
||||
Java_sun_nio_ch_NativeThread_signal(JNIEnv *env, jclass cl, jlong thread)
|
||||
{
|
||||
if (pthread_kill((pthread_t)thread, INTERRUPT_SIGNAL))
|
||||
JNU_ThrowIOExceptionWithLastError(env, "Thread signal failed");
|
||||
}
|
jdk/src/java.base/share/classes/java/io/ObjectInputFilter.java (new file, 631 lines)
@ -0,0 +1,631 @@
|
||||
/*
|
||||
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation. Oracle designates this
|
||||
* particular file as subject to the "Classpath" exception as provided
|
||||
* by Oracle in the LICENSE file that accompanied this code.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
package java.io;
|
||||
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.security.Security;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
|
||||
/**
|
||||
* Filter classes, array lengths, and graph metrics during deserialization.
|
||||
* If set on an {@link ObjectInputStream}, the {@link #checkInput checkInput(FilterInfo)}
|
||||
* method is called to validate classes, the length of each array,
|
||||
* the number of objects being read from the stream, the depth of the graph,
|
||||
* and the total number of bytes read from the stream.
|
||||
* <p>
|
||||
* A filter can be set via {@link ObjectInputStream#setObjectInputFilter setObjectInputFilter}
|
||||
* for an individual ObjectInputStream.
|
||||
* A filter can be set via {@link Config#setSerialFilter(ObjectInputFilter) Config.setSerialFilter}
|
||||
* to affect every {@code ObjectInputStream} that does not otherwise set a filter.
|
||||
* <p>
|
||||
* A filter determines whether the arguments are {@link Status#ALLOWED ALLOWED}
|
||||
* or {@link Status#REJECTED REJECTED} and should return the appropriate status.
|
||||
* If the filter cannot determine the status it should return
|
||||
* {@link Status#UNDECIDED UNDECIDED}.
|
||||
* Filters should be designed for the specific use case and expected types.
|
||||
* A filter designed for a particular use may be passed a class that is outside
|
||||
* of the scope of the filter. If the purpose of the filter is to black-list classes
|
||||
* then it can reject a candidate class that matches and report UNDECIDED for others.
|
||||
* A filter may be called with a {@code null} class, an {@code arrayLength} of -1,
|
||||
* the depth, number of references, and stream size and return a status
|
||||
* that reflects only one or only some of the values.
|
||||
* This allows a filter to be specific about the choice it is reporting and
|
||||
* to use other filters without forcing either allowed or rejected status.
|
||||
*
|
||||
* <p>
|
||||
* Typically, a custom filter should check if a process-wide filter
|
||||
* is configured and defer to it if so. For example,
|
||||
* <pre>{@code
|
||||
* ObjectInputFilter.Status checkInput(FilterInfo info) {
|
||||
* ObjectInputFilter serialFilter = ObjectInputFilter.Config.getSerialFilter();
|
||||
* if (serialFilter != null) {
|
||||
* ObjectInputFilter.Status status = serialFilter.checkInput(info);
|
||||
* if (status != ObjectInputFilter.Status.UNDECIDED) {
|
||||
* // The process-wide filter overrides this filter
|
||||
* return status;
|
||||
* }
|
||||
* }
|
||||
* if (info.serialClass() != null &&
|
||||
* Remote.class.isAssignableFrom(info.serialClass())) {
|
||||
* return Status.REJECTED; // Do not allow Remote objects
|
||||
* }
|
||||
* return Status.UNDECIDED;
|
||||
* }
|
||||
*}</pre>
|
||||
* <p>
|
||||
* Unless otherwise noted, passing a {@code null} argument to a
|
||||
* method in this interface and its nested classes will cause a
|
||||
* {@link NullPointerException} to be thrown.
|
||||
*
|
||||
* @see ObjectInputStream#setObjectInputFilter(ObjectInputFilter)
|
||||
* @since 9
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface ObjectInputFilter {
|
||||
|
||||
/**
|
||||
* Check the class, array length, number of object references, depth,
|
||||
* stream size, and other available filtering information.
|
||||
* Implementations of this method check the contents of the object graph being created
|
||||
* during deserialization. The filter returns {@link Status#ALLOWED Status.ALLOWED},
|
||||
* {@link Status#REJECTED Status.REJECTED}, or {@link Status#UNDECIDED Status.UNDECIDED}.
|
||||
*
|
||||
* @param filterInfo provides information about the current object being deserialized,
|
||||
* if any, and the status of the {@link ObjectInputStream}
|
||||
* @return {@link Status#ALLOWED Status.ALLOWED} if accepted,
|
||||
* {@link Status#REJECTED Status.REJECTED} if rejected,
|
||||
* {@link Status#UNDECIDED Status.UNDECIDED} if undecided.
|
||||
* @since 9
|
||||
*/
|
||||
Status checkInput(FilterInfo filterInfo);
|
||||
|
||||
/**
|
||||
* FilterInfo provides access to information about the current object
|
||||
* being deserialized and the status of the {@link ObjectInputStream}.
|
||||
* @since 9
|
||||
*/
|
||||
interface FilterInfo {
|
||||
/**
|
||||
* The class of an object being deserialized.
|
||||
* For arrays, it is the array type.
|
||||
* For example, the array class name of a two-dimensional array of strings is
|
||||
* "{@code [[Ljava.lang.String;}".
|
||||
* To check the array's element type, iteratively use
|
||||
* {@link Class#getComponentType() Class.getComponentType} while the result
|
||||
* is an array and then check the class.
|
||||
* The {@code serialClass} is {@code null} in the case where a new object is not being
|
||||
* created and to give the filter a chance to check the depth, number of
|
||||
* references to existing objects, and the stream size.
|
||||
*
|
||||
* @return class of an object being deserialized; may be null
|
||||
*/
|
||||
Class<?> serialClass();
|
||||
|
||||
/**
|
||||
* The number of array elements when deserializing an array of the class.
|
||||
*
|
||||
* @return the non-negative number of array elements when deserializing
|
||||
* an array of the class, otherwise -1
|
||||
*/
|
||||
long arrayLength();
|
||||
|
||||
/**
|
||||
* The current depth.
|
||||
* The depth starts at {@code 1} and increases for each nested object and
|
||||
* decreases when each nested object returns.
|
||||
*
|
||||
* @return the current depth
|
||||
*/
|
||||
long depth();
|
||||
|
||||
/**
|
||||
* The current number of object references.
|
||||
*
|
||||
* @return the non-negative current number of object references
|
||||
*/
|
||||
long references();
|
||||
|
||||
/**
|
||||
* The current number of bytes consumed.
|
||||
* @implSpec {@code streamBytes} is implementation specific
|
||||
* and may not be directly related to the object in the stream
|
||||
* that caused the callback.
|
||||
*
|
||||
* @return the non-negative current number of bytes consumed
|
||||
*/
|
||||
long streamBytes();
|
||||
}
|
||||
|
||||
/**
|
||||
* The status of a check on the class, array length, number of references,
|
||||
* depth, and stream size.
|
||||
*
|
||||
* @since 9
|
||||
*/
|
||||
enum Status {
|
||||
/**
|
||||
* The status is undecided, not allowed and not rejected.
|
||||
*/
|
||||
UNDECIDED,
|
||||
/**
|
||||
* The status is allowed.
|
||||
*/
|
||||
ALLOWED,
|
||||
/**
|
||||
* The status is rejected.
|
||||
*/
|
||||
REJECTED;
|
||||
}
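A minimal sketch of how the three-valued status above is meant to compose; this combinator and its names are illustrative only and not part of the changeset:

    import java.io.ObjectInputFilter;

    class FilterCombinators {
        // Defers to 'fallback' only while 'primary' reports UNDECIDED, so a definite
        // ALLOWED or REJECTED from the primary filter always wins.
        static ObjectInputFilter compose(ObjectInputFilter primary, ObjectInputFilter fallback) {
            return info -> {
                ObjectInputFilter.Status s = primary.checkInput(info);
                return (s != ObjectInputFilter.Status.UNDECIDED) ? s : fallback.checkInput(info);
            };
        }
    }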
|
||||
|
||||
/**
|
||||
* A utility class to set and get the process-wide filter or create a filter
|
||||
* from a pattern string. If a process-wide filter is set, it will be
|
||||
* used for each {@link ObjectInputStream} that does not set its own filter.
|
||||
* <p>
|
||||
* The filter, once set, should be stateless and idempotent,
|
||||
* reporting the same result when passed the same arguments.
|
||||
* <p>
|
||||
* The filter is configured using the {@link java.security.Security}
|
||||
* property {@code jdk.serialFilter} and can be overridden by
|
||||
* the System property {@code jdk.serialFilter}.
|
||||
*
|
||||
* The syntax is the same as for the {@link #createFilter(String) createFilter} method.
|
||||
*
|
||||
* @since 9
|
||||
*/
|
||||
final class Config {
|
||||
/* No instances. */
|
||||
private Config() {}
|
||||
|
||||
/**
|
||||
* Lock object for process-wide filter.
|
||||
*/
|
||||
private final static Object serialFilterLock = new Object();
|
||||
|
||||
/**
|
||||
* Debug: Logger
|
||||
*/
|
||||
private final static System.Logger configLog;
|
||||
|
||||
/**
|
||||
* Logger for debugging.
|
||||
*/
|
||||
static void filterLog(System.Logger.Level level, String msg, Object... args) {
|
||||
if (configLog != null) {
|
||||
configLog.log(level, msg, args);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The name for the process-wide deserialization filter.
|
||||
* Used as a system property and a java.security.Security property.
|
||||
*/
|
||||
private final static String SERIAL_FILTER_PROPNAME = "jdk.serialFilter";
|
||||
|
||||
/**
|
||||
* The process-wide filter; may be null.
|
||||
* Lookup the filter in java.security.Security or
|
||||
* the system property.
|
||||
*/
|
||||
private final static ObjectInputFilter configuredFilter;
|
||||
|
||||
static {
|
||||
configuredFilter = AccessController
|
||||
.doPrivileged((PrivilegedAction<ObjectInputFilter>) () -> {
|
||||
String props = System.getProperty(SERIAL_FILTER_PROPNAME);
|
||||
if (props == null) {
|
||||
props = Security.getProperty(SERIAL_FILTER_PROPNAME);
|
||||
}
|
||||
if (props != null) {
|
||||
System.Logger log =
|
||||
System.getLogger("java.io.serialization");
|
||||
log.log(System.Logger.Level.INFO,
|
||||
"Creating serialization filter from {0}", props);
|
||||
try {
|
||||
return createFilter(props);
|
||||
} catch (RuntimeException re) {
|
||||
log.log(System.Logger.Level.ERROR,
|
||||
"Error configuring filter: {0}", re);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
});
|
||||
configLog = (configuredFilter != null) ? System.getLogger("java.io.serialization") : null;
|
||||
}
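A minimal sketch of how the property read by the initializer above is typically supplied; the example pattern is made up:

    import java.io.ObjectInputFilter;

    class ShowConfiguredFilter {
        // Run as, for example:
        //   java -Djdk.serialFilter="maxdepth=10;!com.example.**" ShowConfiguredFilter
        // The jdk.serialFilter Security property is consulted when the system
        // property is absent, as in the static initializer above.
        public static void main(String[] args) {
            ObjectInputFilter configured = ObjectInputFilter.Config.getSerialFilter();
            System.out.println("process-wide filter: " + configured); // null if nothing was configured
        }
    }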
|
||||
|
||||
/**
|
||||
* Current configured filter.
|
||||
*/
|
||||
private static ObjectInputFilter serialFilter = configuredFilter;
|
||||
|
||||
/**
|
||||
* Returns the process-wide serialization filter or {@code null} if not configured.
|
||||
*
|
||||
* @return the process-wide serialization filter or {@code null} if not configured
|
||||
*/
|
||||
public static ObjectInputFilter getSerialFilter() {
|
||||
synchronized (serialFilterLock) {
|
||||
return serialFilter;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the process-wide filter if it has not already been configured or set.
|
||||
*
|
||||
* @param filter the serialization filter to set as the process-wide filter; not null
|
||||
* @throws SecurityException if there is a security manager and the
|
||||
* {@code SerializablePermission("serialFilter")} is not granted
|
||||
* @throws IllegalStateException if the filter has already been set to a non-null value
|
||||
*/
|
||||
public static void setSerialFilter(ObjectInputFilter filter) {
|
||||
Objects.requireNonNull(filter, "filter");
|
||||
SecurityManager sm = System.getSecurityManager();
|
||||
if (sm != null) {
|
||||
sm.checkPermission(ObjectStreamConstants.SERIAL_FILTER_PERMISSION);
|
||||
}
|
||||
synchronized (serialFilterLock) {
|
||||
if (serialFilter != null) {
|
||||
throw new IllegalStateException("Serial filter can only be set once");
|
||||
}
|
||||
serialFilter = filter;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an ObjectInputFilter from a string of patterns.
|
||||
* <p>
|
||||
* Patterns are separated by ";" (semicolon). Whitespace is significant and
|
||||
* is considered part of the pattern.
|
||||
* If a pattern includes an equals sign, "{@code =}", it sets a limit.
|
||||
* If a limit appears more than once the last value is used.
|
||||
* <ul>
|
||||
* <li>maxdepth={@code value} - the maximum depth of a graph</li>
|
||||
* <li>maxrefs={@code value} - the maximum number of internal references</li>
|
||||
* <li>maxbytes={@code value} - the maximum number of bytes in the input stream</li>
|
||||
* <li>maxarray={@code value} - the maximum array length allowed</li>
|
||||
* </ul>
|
||||
* <p>
|
||||
* Other patterns match or reject class or package name
|
||||
* as returned from {@link Class#getName() Class.getName()} and
|
||||
* if an optional module name is present
|
||||
* {@link java.lang.reflect.Module#getName() class.getModule().getName()}.
|
||||
* Note that for arrays the element type is used in the pattern,
|
||||
* not the array type.
|
||||
* <ul>
|
||||
* <li>If the pattern starts with "!", the class is rejected if the remaining pattern is matched;
|
||||
* otherwise the class is allowed if the pattern matches.
|
||||
* <li>If the pattern contains "/", the non-empty prefix up to the "/" is the module name;
|
||||
* if the module name matches the module name of the class then
|
||||
* the remaining pattern is matched with the class name.
|
||||
* If there is no "/", the module name is not compared.
|
||||
* <li>If the pattern ends with ".**" it matches any class in the package and all subpackages.
|
||||
* <li>If the pattern ends with ".*" it matches any class in the package.
|
||||
* <li>If the pattern ends with "*", it matches any class with the pattern as a prefix.
|
||||
* <li>If the pattern is equal to the class name, it matches.
|
||||
* <li>Otherwise, the pattern is not matched.
|
||||
* </ul>
|
||||
* <p>
|
||||
* The resulting filter performs the limit checks and then
|
||||
* tries to match the class, if any. If any of the limits are exceeded,
|
||||
* the filter returns {@link Status#REJECTED Status.REJECTED}.
|
||||
* If the class is an array type, the class to be matched is the element type.
|
||||
* Arrays of any number of dimensions are treated the same as the element type.
|
||||
* For example, a pattern of "{@code !example.Foo}"
|
||||
* rejects creation of any instance or array of {@code example.Foo}.
|
||||
* The first pattern that matches, working from left to right, determines
|
||||
* the {@link Status#ALLOWED Status.ALLOWED}
|
||||
* or {@link Status#REJECTED Status.REJECTED} result.
|
||||
* If nothing matches, the result is {@link Status#UNDECIDED Status.UNDECIDED}.
|
||||
*
|
||||
* @param pattern the pattern string to parse; not null
|
||||
* @return a filter to check a class being deserialized, or
*          {@code null} if the pattern contains no checks
|
||||
* @throws IllegalArgumentException
|
||||
* if a limit is missing the name, or the long value
|
||||
* is not a number or is negative,
|
||||
* or the module name is missing if the pattern contains "/"
|
||||
* or if the package is missing for ".*" and ".**"
|
||||
*/
|
||||
public static ObjectInputFilter createFilter(String pattern) {
|
||||
Objects.requireNonNull(pattern, "pattern");
|
||||
return Global.createFilter(pattern);
|
||||
}
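A minimal usage sketch of the pattern grammar documented above; the pattern and package names are illustrative only:

    import java.io.ObjectInputFilter;

    class ProcessWideFilterSetup {
        public static void main(String[] args) {
            // The resulting filter checks the limits first, then matches patterns left to right.
            ObjectInputFilter filter = ObjectInputFilter.Config.createFilter(
                    "maxdepth=20;maxbytes=1048576;!com.example.unsafe.**;com.example.*");
            ObjectInputFilter.Config.setSerialFilter(filter); // may only be set once
            System.out.println("installed: " + filter);       // toString() returns the original pattern
        }
    }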
|
||||
|
||||
/**
|
||||
* Implementation of ObjectInputFilter that performs the checks of
|
||||
* the process-wide serialization filter. If configured, it will be
|
||||
* used for all ObjectInputStreams that do not set their own filters.
|
||||
*
|
||||
*/
|
||||
final static class Global implements ObjectInputFilter {
|
||||
/**
|
||||
* The pattern used to create the filter.
|
||||
*/
|
||||
private final String pattern;
|
||||
/**
|
||||
* The list of class filters.
|
||||
*/
|
||||
private final List<Function<Class<?>, Status>> filters;
|
||||
/**
|
||||
* Maximum allowed bytes in the stream.
|
||||
*/
|
||||
private long maxStreamBytes;
|
||||
/**
|
||||
* Maximum depth of the graph allowed.
|
||||
*/
|
||||
private long maxDepth;
|
||||
/**
|
||||
* Maximum number of references in a graph.
|
||||
*/
|
||||
private long maxReferences;
|
||||
/**
|
||||
* Maximum length of any array.
|
||||
*/
|
||||
private long maxArrayLength;
|
||||
|
||||
/**
|
||||
* Returns an ObjectInputFilter from a string of patterns.
|
||||
*
|
||||
* @param pattern the pattern string to parse
|
||||
* @return a filter to check a class being deserialized; not null
|
||||
* @throws IllegalArgumentException if the pattern is malformed,
*          for example if a limit is missing its name, or its long value
*          is not a number or is negative.
|
||||
*/
|
||||
static ObjectInputFilter createFilter(String pattern) {
|
||||
Global filter = new Global(pattern);
|
||||
return filter.isEmpty() ? null : filter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new filter from the pattern String.
|
||||
*
|
||||
* @param pattern a pattern string of filters
|
||||
* @throws IllegalArgumentException if the pattern is malformed
|
||||
*/
|
||||
private Global(String pattern) {
|
||||
this.pattern = pattern;
|
||||
|
||||
maxArrayLength = Long.MAX_VALUE; // Default values are unlimited
|
||||
maxDepth = Long.MAX_VALUE;
|
||||
maxReferences = Long.MAX_VALUE;
|
||||
maxStreamBytes = Long.MAX_VALUE;
|
||||
|
||||
String[] patterns = pattern.split(";");
|
||||
filters = new ArrayList<>(patterns.length);
|
||||
for (int i = 0; i < patterns.length; i++) {
|
||||
String p = patterns[i];
|
||||
int nameLen = p.length();
|
||||
if (nameLen == 0) {
|
||||
continue;
|
||||
}
|
||||
if (parseLimit(p)) {
|
||||
// If the pattern contained a limit setting, i.e. type=value
|
||||
continue;
|
||||
}
|
||||
boolean negate = p.charAt(0) == '!';
|
||||
int poffset = negate ? 1 : 0;
|
||||
|
||||
// isolate module name, if any
|
||||
int slash = p.indexOf('/', poffset);
|
||||
if (slash == poffset) {
|
||||
throw new IllegalArgumentException("module name is missing in: \"" + pattern + "\"");
|
||||
}
|
||||
final String moduleName = (slash >= 0) ? p.substring(poffset, slash) : null;
|
||||
poffset = (slash >= 0) ? slash + 1 : poffset;
|
||||
|
||||
final Function<Class<?>, Status> patternFilter;
|
||||
if (p.endsWith("*")) {
|
||||
// Wildcard cases
|
||||
if (p.endsWith(".*")) {
|
||||
// Pattern is a package name with a wildcard
|
||||
final String pkg = p.substring(poffset, nameLen - 1);
|
||||
if (pkg.length() < 2) {
|
||||
throw new IllegalArgumentException("package missing in: \"" + pattern + "\"");
|
||||
}
|
||||
if (negate) {
|
||||
// A Function that fails if the class starts with the pattern, otherwise don't care
|
||||
patternFilter = c -> matchesPackage(c, pkg) ? Status.REJECTED : Status.UNDECIDED;
|
||||
} else {
|
||||
// A Function that succeeds if the class starts with the pattern, otherwise don't care
|
||||
patternFilter = c -> matchesPackage(c, pkg) ? Status.ALLOWED : Status.UNDECIDED;
|
||||
}
|
||||
} else if (p.endsWith(".**")) {
|
||||
// Pattern is a package prefix with a double wildcard
|
||||
final String pkgs = p.substring(poffset, nameLen - 2);
|
||||
if (pkgs.length() < 2) {
|
||||
throw new IllegalArgumentException("package missing in: \"" + pattern + "\"");
|
||||
}
|
||||
if (negate) {
|
||||
// A Function that fails if the class starts with the pattern, otherwise don't care
|
||||
patternFilter = c -> c.getName().startsWith(pkgs) ? Status.REJECTED : Status.UNDECIDED;
|
||||
} else {
|
||||
// A Function that succeeds if the class starts with the pattern, otherwise don't care
|
||||
patternFilter = c -> c.getName().startsWith(pkgs) ? Status.ALLOWED : Status.UNDECIDED;
|
||||
}
|
||||
} else {
|
||||
// Pattern is a classname (possibly empty) with a trailing wildcard
|
||||
final String className = p.substring(poffset, nameLen - 1);
|
||||
if (negate) {
|
||||
// A Function that fails if the class starts with the pattern, otherwise don't care
|
||||
patternFilter = c -> c.getName().startsWith(className) ? Status.REJECTED : Status.UNDECIDED;
|
||||
} else {
|
||||
// A Function that succeeds if the class starts with the pattern, otherwise don't care
|
||||
patternFilter = c -> c.getName().startsWith(className) ? Status.ALLOWED : Status.UNDECIDED;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
final String name = p.substring(poffset);
|
||||
if (name.isEmpty()) {
|
||||
throw new IllegalArgumentException("class or package missing in: \"" + pattern + "\"");
|
||||
}
|
||||
// Pattern is a class name
|
||||
if (negate) {
|
||||
// A Function that fails if the class equals the pattern, otherwise don't care
|
||||
patternFilter = c -> c.getName().equals(name) ? Status.REJECTED : Status.UNDECIDED;
|
||||
} else {
|
||||
// A Function that succeeds if the class equals the pattern, otherwise don't care
|
||||
patternFilter = c -> c.getName().equals(name) ? Status.ALLOWED : Status.UNDECIDED;
|
||||
}
|
||||
}
|
||||
// If there is a moduleName, combine the module name check with the package/class check
|
||||
if (moduleName == null) {
|
||||
filters.add(patternFilter);
|
||||
} else {
|
||||
filters.add(c -> moduleName.equals(c.getModule().getName()) ? patternFilter.apply(c) : Status.UNDECIDED);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether this filter has no checks to apply.
* @return {@code true} if the filter has no checks, {@code false} otherwise
|
||||
*/
|
||||
private boolean isEmpty() {
|
||||
return filters.isEmpty() &&
|
||||
maxArrayLength == Long.MAX_VALUE &&
|
||||
maxDepth == Long.MAX_VALUE &&
|
||||
maxReferences == Long.MAX_VALUE &&
|
||||
maxStreamBytes == Long.MAX_VALUE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse out a limit for one of maxarray, maxdepth, maxbytes, maxreferences.
|
||||
*
|
||||
* @param pattern a string with a type name, '=' and a value
|
||||
* @return {@code true} if a limit was parsed, else {@code false}
|
||||
* @throws IllegalArgumentException if the pattern is missing
|
||||
* the name, the Long value is not a number or is negative.
|
||||
*/
|
||||
private boolean parseLimit(String pattern) {
|
||||
int eqNdx = pattern.indexOf('=');
|
||||
if (eqNdx < 0) {
|
||||
// not a limit pattern
|
||||
return false;
|
||||
}
|
||||
String valueString = pattern.substring(eqNdx + 1);
|
||||
if (pattern.startsWith("maxdepth=")) {
|
||||
maxDepth = parseValue(valueString);
|
||||
} else if (pattern.startsWith("maxarray=")) {
|
||||
maxArrayLength = parseValue(valueString);
|
||||
} else if (pattern.startsWith("maxrefs=")) {
|
||||
maxReferences = parseValue(valueString);
|
||||
} else if (pattern.startsWith("maxbytes=")) {
|
||||
maxStreamBytes = parseValue(valueString);
|
||||
} else {
|
||||
throw new IllegalArgumentException("unknown limit: " + pattern.substring(0, eqNdx));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the value of a limit and check that it is non-negative.
|
||||
* @param string the input string
|
||||
* @return the parsed value
|
||||
* @throws IllegalArgumentException if parsing the value fails or the value is negative
|
||||
*/
|
||||
private static long parseValue(String string) throws IllegalArgumentException {
|
||||
// Parse a Long from after the '=' to the end
|
||||
long value = Long.parseLong(string);
|
||||
if (value < 0) {
|
||||
throw new IllegalArgumentException("negative limit: " + string);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public Status checkInput(FilterInfo filterInfo) {
|
||||
if (filterInfo.references() < 0
|
||||
|| filterInfo.depth() < 0
|
||||
|| filterInfo.streamBytes() < 0
|
||||
|| filterInfo.references() > maxReferences
|
||||
|| filterInfo.depth() > maxDepth
|
||||
|| filterInfo.streamBytes() > maxStreamBytes) {
|
||||
return Status.REJECTED;
|
||||
}
|
||||
|
||||
Class<?> clazz = filterInfo.serialClass();
|
||||
if (clazz != null) {
|
||||
if (clazz.isArray()) {
|
||||
if (filterInfo.arrayLength() >= 0 && filterInfo.arrayLength() > maxArrayLength) {
|
||||
// array length is too big
|
||||
return Status.REJECTED;
|
||||
}
|
||||
do {
|
||||
// Arrays are decided based on the component type
|
||||
clazz = clazz.getComponentType();
|
||||
} while (clazz.isArray());
|
||||
}
|
||||
|
||||
if (clazz.isPrimitive()) {
|
||||
// Primitive types are undecided; let someone else decide
|
||||
return Status.UNDECIDED;
|
||||
} else {
|
||||
// Find any filter that allowed or rejected the class
|
||||
final Class<?> cl = clazz;
|
||||
Optional<Status> status = filters.stream()
|
||||
.map(f -> f.apply(cl))
|
||||
.filter(p -> p != Status.UNDECIDED)
|
||||
.findFirst();
|
||||
return status.orElse(Status.UNDECIDED);
|
||||
}
|
||||
}
|
||||
return Status.UNDECIDED;
|
||||
}
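A hedged sketch exercising the decision order implemented above (limits first, then array unwrapping, then the pattern list); the pattern and the stubbed FilterInfo values are made up:

    import java.io.ObjectInputFilter;
    import java.io.ObjectInputFilter.Status;

    class CheckInputSketch {
        public static void main(String[] args) {
            ObjectInputFilter f = ObjectInputFilter.Config.createFilter("maxarray=3;java.lang.*");
            Status s = f.checkInput(new ObjectInputFilter.FilterInfo() {
                public Class<?> serialClass() { return String[].class; } // would unwrap to java.lang.String
                public long arrayLength()     { return 5; }              // exceeds maxarray=3
                public long depth()           { return 1; }
                public long references()      { return 1; }
                public long streamBytes()     { return 0; }
            });
            System.out.println(s); // REJECTED: the array-length limit fires before any pattern is consulted
        }
    }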
|
||||
|
||||
/**
|
||||
* Returns {@code true} if the class is in the package.
|
||||
*
|
||||
* @param c a class
|
||||
* @param pkg a package name (including the trailing ".")
|
||||
* @return {@code true} if the class is in the package,
|
||||
* otherwise {@code false}
|
||||
*/
|
||||
private static boolean matchesPackage(Class<?> c, String pkg) {
|
||||
String n = c.getName();
|
||||
return n.startsWith(pkg) && n.lastIndexOf('.') == pkg.length() - 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the pattern used to create this filter.
|
||||
* @return the pattern used to create this filter
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return pattern;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -26,6 +26,7 @@
|
||||
package java.io;
|
||||
|
||||
import java.io.ObjectStreamClass.WeakClassKey;
|
||||
import java.lang.System.Logger;
|
||||
import java.lang.ref.ReferenceQueue;
|
||||
import java.lang.reflect.Array;
|
||||
import java.lang.reflect.Modifier;
|
||||
@ -37,10 +38,12 @@ import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import static java.io.ObjectStreamClass.processQueue;
|
||||
import jdk.internal.misc.JavaObjectInputStreamAccess;
|
||||
|
||||
import jdk.internal.misc.ObjectStreamClassValidator;
|
||||
import jdk.internal.misc.SharedSecrets;
|
||||
import jdk.internal.misc.Unsafe;
|
||||
@ -172,6 +175,16 @@ import sun.reflect.misc.ReflectUtil;
|
||||
* protected) or that there are get and set methods that can be used to restore
|
||||
* the state.
|
||||
*
|
||||
* <p>The contents of the stream can be filtered during deserialization.
|
||||
* If a {@linkplain #setObjectInputFilter(ObjectInputFilter) filter is set}
|
||||
* on an ObjectInputStream, the {@link ObjectInputFilter} can check that
|
||||
* the classes, array lengths, number of references in the stream, depth, and
|
||||
* number of bytes consumed from the input stream are allowed and
|
||||
* if not, can terminate deserialization.
|
||||
* A {@linkplain ObjectInputFilter.Config#setSerialFilter(ObjectInputFilter) process-wide filter}
|
||||
* can be configured that is applied to each {@code ObjectInputStream} unless replaced
|
||||
* using {@link #setObjectInputFilter(ObjectInputFilter) setObjectInputFilter}.
|
||||
*
|
||||
* <p>Any exception that occurs while deserializing an object will be caught by
|
||||
* the ObjectInputStream and abort the reading process.
|
||||
*
|
||||
@ -240,12 +253,32 @@ public class ObjectInputStream
|
||||
new ReferenceQueue<>();
|
||||
}
|
||||
|
||||
/*
|
||||
* Separate class to defer initialization of logging until needed.
|
||||
*/
|
||||
private static class Logging {
|
||||
/*
|
||||
* Logger for ObjectInputFilter results.
|
||||
* Set up the filter logger if its level is set to DEBUG or TRACE.
|
||||
* (Assuming it will not change).
|
||||
*/
|
||||
static final System.Logger filterLogger;
|
||||
|
||||
static {
|
||||
Logger filterLog = System.getLogger("java.io.serialization");
|
||||
filterLogger = (filterLog.isLoggable(Logger.Level.DEBUG)
|
||||
|| filterLog.isLoggable(Logger.Level.TRACE)) ? filterLog : null;
|
||||
}
|
||||
}
|
||||
|
||||
/** filter stream for handling block data conversion */
|
||||
private final BlockDataInputStream bin;
|
||||
/** validation callback list */
|
||||
private final ValidationList vlist;
|
||||
/** recursion depth */
|
||||
private int depth;
|
||||
private long depth;
|
||||
/** Total number of references to any type of object, class, enum, proxy, etc. */
|
||||
private long totalObjectRefs;
|
||||
/** whether stream is closed */
|
||||
private boolean closed;
|
||||
|
||||
@ -268,12 +301,21 @@ public class ObjectInputStream
|
||||
*/
|
||||
private SerialCallbackContext curContext;
|
||||
|
||||
/**
|
||||
* Filter of class descriptors and classes read from the stream;
|
||||
* may be null.
|
||||
*/
|
||||
private ObjectInputFilter serialFilter;
|
||||
|
||||
/**
|
||||
* Creates an ObjectInputStream that reads from the specified InputStream.
|
||||
* A serialization stream header is read from the stream and verified.
|
||||
* This constructor will block until the corresponding ObjectOutputStream
|
||||
* has written and flushed the header.
|
||||
*
|
||||
* <p>The serialization filter is initialized to the value of
|
||||
* {@linkplain ObjectInputFilter.Config#getSerialFilter() the process-wide filter}.
|
||||
*
|
||||
* <p>If a security manager is installed, this constructor will check for
|
||||
* the "enableSubclassImplementation" SerializablePermission when invoked
|
||||
* directly or indirectly by the constructor of a subclass which overrides
|
||||
@ -295,6 +337,7 @@ public class ObjectInputStream
|
||||
bin = new BlockDataInputStream(in);
|
||||
handles = new HandleTable(10);
|
||||
vlist = new ValidationList();
|
||||
serialFilter = ObjectInputFilter.Config.getSerialFilter();
|
||||
enableOverride = false;
|
||||
readStreamHeader();
|
||||
bin.setBlockDataMode(true);
|
||||
@ -305,6 +348,9 @@ public class ObjectInputStream
|
||||
* ObjectInputStream to not have to allocate private data just used by this
|
||||
* implementation of ObjectInputStream.
|
||||
*
|
||||
* <p>The serialization filter is initialized to the value of
|
||||
* {@linkplain ObjectInputFilter.Config#getSerialFilter() the process-wide filter}.
|
||||
*
|
||||
* <p>If there is a security manager installed, this method first calls the
|
||||
* security manager's <code>checkPermission</code> method with the
|
||||
* <code>SerializablePermission("enableSubclassImplementation")</code>
|
||||
@ -325,6 +371,7 @@ public class ObjectInputStream
|
||||
bin = null;
|
||||
handles = null;
|
||||
vlist = null;
|
||||
serialFilter = ObjectInputFilter.Config.getSerialFilter();
|
||||
enableOverride = true;
|
||||
}
|
||||
|
||||
@ -332,7 +379,7 @@ public class ObjectInputStream
|
||||
* Read an object from the ObjectInputStream. The class of the object, the
|
||||
* signature of the class, and the values of the non-transient and
|
||||
* non-static fields of the class and all of its supertypes are read.
|
||||
* Default deserializing for a class can be overriden using the writeObject
|
||||
* Default deserializing for a class can be overridden using the writeObject
|
||||
* and readObject methods. Objects referenced by this object are read
|
||||
* transitively so that a complete equivalent graph of objects is
|
||||
* reconstructed by readObject.
|
||||
@ -343,6 +390,10 @@ public class ObjectInputStream
|
||||
* priorities. The callbacks are registered by objects (in the readObject
|
||||
* special methods) as they are individually restored.
|
||||
*
|
||||
* <p>The serialization filter, when not {@code null}, is invoked for
|
||||
* each object (regular or class) read to reconstruct the root object.
|
||||
* See {@link #setObjectInputFilter(ObjectInputFilter) setObjectInputFilter} for details.
|
||||
*
|
||||
* <p>Exceptions are thrown for problems with the InputStream and for
|
||||
* classes that should not be deserialized. All exceptions are fatal to
|
||||
* the InputStream and leave it in an indeterminate state; it is up to the
|
||||
@ -438,6 +489,10 @@ public class ObjectInputStream
|
||||
* invocation of readObject or readUnshared on the ObjectInputStream,
|
||||
* even if the underlying data stream has been manipulated.
|
||||
*
|
||||
* <p>The serialization filter, when not {@code null}, is invoked for
|
||||
* each object (regular or class) read to reconstruct the root object.
|
||||
* See {@link #setObjectInputFilter(ObjectInputFilter) setObjectInputFilter} for details.
|
||||
*
|
||||
* <p>ObjectInputStream subclasses which override this method can only be
|
||||
* constructed in security contexts possessing the
|
||||
* "enableSubclassImplementation" SerializablePermission; any attempt to
|
||||
@ -1093,6 +1148,134 @@ public class ObjectInputStream
|
||||
return bin.readUTF();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the serialization filter for this stream.
|
||||
* The serialization filter is the most recent filter set in
|
||||
* {@link #setObjectInputFilter setObjectInputFilter} or
|
||||
* the initial process-wide filter from
|
||||
* {@link ObjectInputFilter.Config#getSerialFilter() ObjectInputFilter.Config.getSerialFilter}.
|
||||
*
|
||||
* @return the serialization filter for the stream; may be null
|
||||
* @since 9
|
||||
*/
|
||||
public final ObjectInputFilter getObjectInputFilter() {
|
||||
return serialFilter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the serialization filter for the stream.
|
||||
* The filter's {@link ObjectInputFilter#checkInput checkInput} method is called
|
||||
* for each class and reference in the stream.
|
||||
* The filter can check any or all of the class, the array length, the number
|
||||
* of references, the depth of the graph, and the size of the input stream.
|
||||
* <p>
|
||||
* If the filter returns {@link ObjectInputFilter.Status#REJECTED Status.REJECTED},
|
||||
* {@code null} or throws a {@link RuntimeException},
|
||||
* the active {@code readObject} or {@code readUnshared}
|
||||
* throws {@link InvalidClassException}, otherwise deserialization
|
||||
* continues uninterrupted.
|
||||
* <p>
|
||||
* The serialization filter is initialized to the value of
|
||||
* {@link ObjectInputFilter.Config#getSerialFilter() ObjectInputFilter.Config.getSerialFilter}
|
||||
* when the {@code ObjectInputStream} is constructed and can be set
|
||||
* to a custom filter only once.
|
||||
*
|
||||
* @implSpec
|
||||
* The filter, when not {@code null}, is invoked during {@link #readObject readObject}
|
||||
* and {@link #readUnshared readUnshared} for each object
|
||||
* (regular or class) in the stream including the following:
|
||||
* <ul>
|
||||
* <li>each object reference previously deserialized from the stream
|
||||
* (class is {@code null}, arrayLength is -1),
|
||||
* <li>each regular class (class is not {@code null}, arrayLength is -1),
|
||||
* <li>each interface of a dynamic proxy and the dynamic proxy class itself
|
||||
* (class is not {@code null}, arrayLength is -1),
|
||||
* <li>each array is filtered using the array type and length of the array
|
||||
* (class is the array type, arrayLength is the requested length),
|
||||
* <li>each object replaced by its class' {@code readResolve} method
|
||||
* is filtered using the replacement object's class, if not {@code null},
|
||||
* and if it is an array, the arrayLength, otherwise -1,
|
||||
* <li>and each object replaced by {@link #resolveObject resolveObject}
|
||||
* is filtered using the replacement object's class, if not {@code null},
|
||||
* and if it is an array, the arrayLength, otherwise -1.
|
||||
* </ul>
|
||||
*
|
||||
* When the {@link ObjectInputFilter#checkInput checkInput} method is invoked
|
||||
* it is given access to the current class, the array length,
|
||||
* the current number of references already read from the stream,
|
||||
* the depth of nested calls to {@link #readObject readObject} or
|
||||
* {@link #readUnshared readUnshared},
|
||||
* and the implementation dependent number of bytes consumed from the input stream.
|
||||
* <p>
|
||||
* Each call to {@link #readObject readObject} or
|
||||
* {@link #readUnshared readUnshared} increases the depth by 1
|
||||
* before reading an object and decreases by 1 before returning
|
||||
* normally or exceptionally.
|
||||
* The depth starts at {@code 1} and increases for each nested object and
|
||||
* decreases when each nested call returns.
|
||||
* The count of references in the stream starts at {@code 1} and
|
||||
* is increased before reading an object.
|
||||
*
|
||||
* @param filter the filter, may be null
|
||||
* @throws SecurityException if there is a security manager and the
|
||||
* {@code SerializablePermission("serialFilter")} is not granted
|
||||
* @throws IllegalStateException if the {@linkplain #getObjectInputFilter() current filter}
|
||||
* is not {@code null} and is not the process-wide filter
|
||||
* @since 9
|
||||
*/
|
||||
public final void setObjectInputFilter(ObjectInputFilter filter) {
|
||||
SecurityManager sm = System.getSecurityManager();
|
||||
if (sm != null) {
|
||||
sm.checkPermission(ObjectStreamConstants.SERIAL_FILTER_PERMISSION);
|
||||
}
|
||||
// Allow replacement of the process-wide filter if not already set
|
||||
if (serialFilter != null &&
|
||||
serialFilter != ObjectInputFilter.Config.getSerialFilter()) {
|
||||
throw new IllegalStateException("filter can not be set more than once");
|
||||
}
|
||||
this.serialFilter = filter;
|
||||
}
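A minimal per-stream sketch of the method above; the pattern is illustrative, and under a security manager the call requires SerializablePermission("serialFilter"):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.ObjectInputFilter;
    import java.io.ObjectInputStream;

    class PerStreamFilterExample {
        // Allow classes in java.* packages (and subpackages) within modest limits; reject everything else.
        static Object readFiltered(InputStream raw) throws IOException, ClassNotFoundException {
            try (ObjectInputStream ois = new ObjectInputStream(raw)) {
                ois.setObjectInputFilter(
                        ObjectInputFilter.Config.createFilter("maxrefs=1000;maxdepth=10;java.**;!*"));
                return ois.readObject();
            }
        }
    }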
|
||||
|
||||
/**
|
||||
* Invoke the serialization filter if non-null.
|
||||
* If the filter rejects or an exception is thrown, throws InvalidClassException.
|
||||
*
|
||||
* @param clazz the class; may be null
|
||||
* @param arrayLength the array length requested; use {@code -1} if not creating an array
|
||||
* @throws InvalidClassException if it is rejected by the filter or
|
||||
* a {@link RuntimeException} is thrown
|
||||
*/
|
||||
private void filterCheck(Class<?> clazz, int arrayLength)
|
||||
throws InvalidClassException {
|
||||
if (serialFilter != null) {
|
||||
RuntimeException ex = null;
|
||||
ObjectInputFilter.Status status;
|
||||
try {
|
||||
status = serialFilter.checkInput(new FilterValues(clazz, arrayLength,
|
||||
totalObjectRefs, depth, bin.getBytesRead()));
|
||||
} catch (RuntimeException e) {
|
||||
// Preventive interception of an exception to log
|
||||
status = ObjectInputFilter.Status.REJECTED;
|
||||
ex = e;
|
||||
}
|
||||
if (Logging.filterLogger != null) {
|
||||
// Debug logging of filter checks that fail; Tracing for those that succeed
|
||||
Logging.filterLogger.log(status == null || status == ObjectInputFilter.Status.REJECTED
|
||||
? Logger.Level.DEBUG
|
||||
: Logger.Level.TRACE,
|
||||
"ObjectInputFilter {0}: {1}, array length: {2}, nRefs: {3}, depth: {4}, bytes: {5}, ex: {6}",
|
||||
status, clazz, arrayLength, totalObjectRefs, depth, bin.getBytesRead(),
|
||||
Objects.toString(ex, "n/a"));
|
||||
}
|
||||
if (status == null ||
|
||||
status == ObjectInputFilter.Status.REJECTED) {
|
||||
InvalidClassException ice = new InvalidClassException("filter status: " + status);
|
||||
ice.initCause(ex);
|
||||
throw ice;
|
||||
}
|
||||
}
|
||||
}
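A hedged caller-side sketch: per the logic above, a rejection (or a filter that throws) surfaces as an InvalidClassException from readObject; names are illustrative:

    import java.io.IOException;
    import java.io.InvalidClassException;
    import java.io.ObjectInputStream;

    class SafeRead {
        // Returns null instead of propagating the InvalidClassException raised when the
        // stream's filter rejects an object. Note that InvalidClassException can also
        // signal other class mismatches, not only filter rejection.
        static Object readOrNull(ObjectInputStream in) throws IOException, ClassNotFoundException {
            try {
                return in.readObject();
            } catch (InvalidClassException rejected) {
                return null;
            }
        }
    }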
|
||||
|
||||
/**
|
||||
* Provide access to the persistent fields read from the input stream.
|
||||
*/
|
||||
@ -1280,7 +1463,7 @@ public class ObjectInputStream
|
||||
*/
|
||||
private static Boolean auditSubclass(Class<?> subcl) {
|
||||
return AccessController.doPrivileged(
|
||||
new PrivilegedAction<>() {
|
||||
new PrivilegedAction<Boolean>() {
|
||||
public Boolean run() {
|
||||
for (Class<?> cl = subcl;
|
||||
cl != ObjectInputStream.class;
|
||||
@ -1340,6 +1523,7 @@ public class ObjectInputStream
|
||||
}
|
||||
|
||||
depth++;
|
||||
totalObjectRefs++;
|
||||
try {
|
||||
switch (tc) {
|
||||
case TC_NULL:
|
||||
@ -1416,6 +1600,15 @@ public class ObjectInputStream
|
||||
}
|
||||
Object rep = resolveObject(obj);
|
||||
if (rep != obj) {
|
||||
// The type of the original object has been filtered but resolveObject
|
||||
// may have replaced it; filter the replacement's type
|
||||
if (rep != null) {
|
||||
if (rep.getClass().isArray()) {
|
||||
filterCheck(rep.getClass(), Array.getLength(rep));
|
||||
} else {
|
||||
filterCheck(rep.getClass(), -1);
|
||||
}
|
||||
}
|
||||
handles.setObject(passHandle, rep);
|
||||
}
|
||||
return rep;
|
||||
@ -1486,6 +1679,7 @@ public class ObjectInputStream
|
||||
throw new InvalidObjectException(
|
||||
"cannot read back reference to unshared object");
|
||||
}
|
||||
filterCheck(null, -1); // just a check for number of references, depth, no class
|
||||
return obj;
|
||||
}
|
||||
|
||||
@ -1590,6 +1784,10 @@ public class ObjectInputStream
|
||||
ReflectUtil.checkProxyPackageAccess(
|
||||
getClass().getClassLoader(),
|
||||
cl.getInterfaces());
|
||||
// Filter the interfaces
|
||||
for (Class<?> clazz : cl.getInterfaces()) {
|
||||
filterCheck(clazz, -1);
|
||||
}
|
||||
}
|
||||
} catch (ClassNotFoundException ex) {
|
||||
resolveEx = ex;
|
||||
@ -1598,6 +1796,9 @@ public class ObjectInputStream
|
||||
|
||||
desc.initProxy(cl, resolveEx, readClassDesc(false));
|
||||
|
||||
// Call filterCheck on the definition
|
||||
filterCheck(desc.forClass(), -1);
|
||||
|
||||
handles.finish(descHandle);
|
||||
passHandle = descHandle;
|
||||
return desc;
|
||||
@ -1645,8 +1846,12 @@ public class ObjectInputStream
|
||||
|
||||
desc.initNonProxy(readDesc, cl, resolveEx, readClassDesc(false));
|
||||
|
||||
// Call filterCheck on the definition
|
||||
filterCheck(desc.forClass(), -1);
|
||||
|
||||
handles.finish(descHandle);
|
||||
passHandle = descHandle;
|
||||
|
||||
return desc;
|
||||
}
|
||||
|
||||
@ -1687,6 +1892,8 @@ public class ObjectInputStream
|
||||
ObjectStreamClass desc = readClassDesc(false);
|
||||
int len = bin.readInt();
|
||||
|
||||
filterCheck(desc.forClass(), len);
|
||||
|
||||
Object array = null;
|
||||
Class<?> cl, ccl = null;
|
||||
if ((cl = desc.forClass()) != null) {
|
||||
@ -1835,6 +2042,14 @@ public class ObjectInputStream
|
||||
rep = cloneArray(rep);
|
||||
}
|
||||
if (rep != obj) {
|
||||
// Filter the replacement object
|
||||
if (rep != null) {
|
||||
if (rep.getClass().isArray()) {
|
||||
filterCheck(rep.getClass(), Array.getLength(rep));
|
||||
} else {
|
||||
filterCheck(rep.getClass(), -1);
|
||||
}
|
||||
}
|
||||
handles.setObject(passHandle, obj = rep);
|
||||
}
|
||||
}
|
||||
@ -2360,7 +2575,7 @@ public class ObjectInputStream
|
||||
try {
|
||||
while (list != null) {
|
||||
AccessController.doPrivileged(
|
||||
new PrivilegedExceptionAction<>()
|
||||
new PrivilegedExceptionAction<Void>()
|
||||
{
|
||||
public Void run() throws InvalidObjectException {
|
||||
list.obj.validateObject();
|
||||
@ -2383,6 +2598,51 @@ public class ObjectInputStream
|
||||
}
|
||||
}
|
/**
* Hold a snapshot of values to be passed to an ObjectInputFilter.
*/
static class FilterValues implements ObjectInputFilter.FilterInfo {
final Class<?> clazz;
final long arrayLength;
final long totalObjectRefs;
final long depth;
final long streamBytes;

public FilterValues(Class<?> clazz, long arrayLength, long totalObjectRefs,
long depth, long streamBytes) {
this.clazz = clazz;
this.arrayLength = arrayLength;
this.totalObjectRefs = totalObjectRefs;
this.depth = depth;
this.streamBytes = streamBytes;
}

@Override
public Class<?> serialClass() {
return clazz;
}

@Override
public long arrayLength() {
return arrayLength;
}

@Override
public long references() {
return totalObjectRefs;
}

@Override
public long depth() {
return depth;
}

@Override
public long streamBytes() {
return streamBytes;
}
}

/**
* Input stream supporting single-byte peek operations.
*/
@ -2392,6 +2652,8 @@ public class ObjectInputStream
private final InputStream in;
/** peeked byte */
private int peekb = -1;
/** total bytes read from the stream */
private long totalBytesRead = 0;

/**
* Creates new PeekInputStream on top of given underlying stream.
@ -2405,7 +2667,12 @@ public class ObjectInputStream
* that it does not consume the read value.
*/
int peek() throws IOException {
return (peekb >= 0) ? peekb : (peekb = in.read());
if (peekb >= 0) {
return peekb;
}
peekb = in.read();
totalBytesRead += peekb >= 0 ? 1 : 0;
return peekb;
}

public int read() throws IOException {
@ -2414,21 +2681,27 @@ public class ObjectInputStream
peekb = -1;
return v;
} else {
return in.read();
int nbytes = in.read();
totalBytesRead += nbytes >= 0 ? 1 : 0;
return nbytes;
}
}

public int read(byte[] b, int off, int len) throws IOException {
int nbytes;
if (len == 0) {
return 0;
} else if (peekb < 0) {
return in.read(b, off, len);
nbytes = in.read(b, off, len);
totalBytesRead += nbytes >= 0 ? nbytes : 0;
return nbytes;
} else {
b[off++] = (byte) peekb;
len--;
peekb = -1;
int n = in.read(b, off, len);
return (n >= 0) ? (n + 1) : 1;
nbytes = in.read(b, off, len);
totalBytesRead += nbytes >= 0 ? nbytes : 0;
return (nbytes >= 0) ? (nbytes + 1) : 1;
}
}

@ -2453,7 +2726,9 @@ public class ObjectInputStream
skipped++;
n--;
}
return skipped + in.skip(n);
n = skipped + in.skip(n);
totalBytesRead += n;
return n;
}

public int available() throws IOException {
@ -2463,6 +2738,10 @@ public class ObjectInputStream
public void close() throws IOException {
in.close();
}

public long getBytesRead() {
return totalBytesRead;
}
}

private static final Unsafe UNSAFE = Unsafe.getUnsafe();
@ -3346,6 +3625,14 @@ public class ObjectInputStream
throw new UTFDataFormatException();
}
}

/**
* Returns the number of bytes read from the input stream.
* @return the number of bytes read from the input stream
*/
long getBytesRead() {
return in.getBytesRead();
}
}

/**

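The FilterValues class above is the snapshot handed to a serialization filter through ObjectInputFilter.FilterInfo, and the PeekInputStream byte counter is what feeds its streamBytes() value. As a rough illustration of how those values get consumed (not part of this commit, and assuming the final JDK 9 shape of java.io.ObjectInputFilter and ObjectInputStream.setObjectInputFilter; the limits below are arbitrary):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputFilter;
import java.io.ObjectInputStream;

public class FilterInfoDemo {
    // Reject overly large arrays, deep graphs, or oversized streams; the limits are made up.
    static final ObjectInputFilter LIMITS = info -> {
        if (info.arrayLength() > 1_000) {                        // FilterInfo.arrayLength()
            return ObjectInputFilter.Status.REJECTED;
        }
        if (info.depth() > 20 || info.streamBytes() > 1_000_000) {
            return ObjectInputFilter.Status.REJECTED;
        }
        return ObjectInputFilter.Status.UNDECIDED;               // defer to other filters/defaults
    };

    static Object readWithFilter(byte[] bytes) throws IOException, ClassNotFoundException {
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            ois.setObjectInputFilter(LIMITS);                    // stream-specific filter
            return ois.readObject();
        }
    }
}

A REJECTED status makes readObject fail with an InvalidClassException; UNDECIDED leaves the decision to the rest of the filter chain.
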
@ -199,6 +199,16 @@ public interface ObjectStreamConstants {
*/
static final SerializablePermission SUBCLASS_IMPLEMENTATION_PERMISSION =
new SerializablePermission("enableSubclassImplementation");

/**
* Enable setting the process-wide serial filter.
*
* @see java.io.ObjectInputFilter.Config#setSerialFilter(ObjectInputFilter)
* @since 9
*/
static final SerializablePermission SERIAL_FILTER_PERMISSION =
new SerializablePermission("serialFilter");

/**
* A Stream Protocol Version. <p>
*

@ -40,7 +40,7 @@ import java.util.StringTokenizer;
* The target name is the name of the Serializable permission (see below).
*
* <P>
* The following table lists all the possible SerializablePermission target names,
* The following table lists the standard {@code SerializablePermission} target names,
* and for each provides a description of what the permission allows
* and a discussion of the risks of granting code the permission.
*
@ -73,6 +73,13 @@ import java.util.StringTokenizer;
* malignant data.</td>
* </tr>
*
* <tr>
* <td>serialFilter</td>
* <td>Setting a filter for ObjectInputStreams.</td>
* <td>Code could remove a configured filter and remove protections
* already established.</td>
* </tr>
*
* </table>
*
* @see java.security.BasicPermission

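Both hunks above document the new serialFilter permission target. For orientation only (not part of this change): installing the process-wide filter goes through ObjectInputFilter.Config and, when a security manager is present, is guarded by SerializablePermission("serialFilter"). A minimal sketch using the JDK 9 pattern-based filter; the package name example.app is hypothetical:

import java.io.ObjectInputFilter;

public class GlobalFilterSetup {
    public static void main(String[] args) {
        // Limits plus class patterns; "!*" rejects anything not matched earlier.
        ObjectInputFilter filter = ObjectInputFilter.Config.createFilter(
                "maxdepth=20;maxarray=1000;example.app.**;!*");
        // Requires SerializablePermission("serialFilter") under a security manager,
        // and the process-wide filter can only be set once.
        ObjectInputFilter.Config.setSerialFilter(filter);
    }
}
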
@ -1516,11 +1516,12 @@ public final class String
* @return a hash code value for this object.
*/
public int hashCode() {
if (hash == 0 && value.length > 0) {
hash = isLatin1() ? StringLatin1.hashCode(value)
: StringUTF16.hashCode(value);
int h = hash;
if (h == 0 && value.length > 0) {
hash = h = isLatin1() ? StringLatin1.hashCode(value)
: StringUTF16.hashCode(value);
}
return hash;
return h;
}

/**

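The String.hashCode rewrite above is the racy single-check idiom: the cached field is read into a local exactly once, so an unsynchronized concurrent write can no longer be seen as non-zero by the if check and then as zero by the return. A generic sketch of the same pattern, independent of String's internals:

public final class CachedHash {
    private final byte[] value;
    private int hash;                      // benignly racy cache; 0 means "not yet computed"

    public CachedHash(byte[] value) {
        this.value = value.clone();
    }

    @Override
    public int hashCode() {
        int h = hash;                      // single read of the shared field
        if (h == 0 && value.length > 0) {
            for (byte b : value) {
                h = 31 * h + b;
            }
            hash = h;                      // racy publish; any thread recomputing gets the same value
        }
        return h;                          // returns the local, never re-reads the field
    }
}
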
@ -1962,12 +1962,12 @@ import static jdk.internal.org.objectweb.asm.Opcodes.*;
* This method is bound as the predicate in {@linkplain MethodHandles#countedLoop(MethodHandle, MethodHandle,
* MethodHandle) counting loops}.
*
* @param counter the counter parameter, passed in during loop execution.
* @param limit the upper bound of the parameter, statically bound at loop creation time.
* @param counter the counter parameter, passed in during loop execution.
*
* @return whether the counter has reached the limit.
*/
static boolean countedLoopPredicate(int counter, int limit) {
static boolean countedLoopPredicate(int limit, int counter) {
return counter < limit;
}

@ -1975,26 +1975,15 @@ import static jdk.internal.org.objectweb.asm.Opcodes.*;
* This method is bound as the step function in {@linkplain MethodHandles#countedLoop(MethodHandle, MethodHandle,
* MethodHandle) counting loops} to increment the counter.
*
* @param limit the upper bound of the loop counter (ignored).
* @param counter the loop counter.
*
* @return the loop counter incremented by 1.
*/
static int countedLoopStep(int counter, int limit) {
static int countedLoopStep(int limit, int counter) {
return counter + 1;
}

/**
* This method is bound as a filter in {@linkplain MethodHandles#countedLoop(MethodHandle, MethodHandle, MethodHandle,
* MethodHandle) counting loops} to pass the correct counter value to the body.
*
* @param counter the loop counter.
*
* @return the loop counter decremented by 1.
*/
static int decrementCounter(int counter) {
return counter - 1;
}

/**
* This is bound to initialize the loop-local iterator in {@linkplain MethodHandles#iteratedLoop iterating loops}.
*
@ -2164,12 +2153,11 @@ import static jdk.internal.org.objectweb.asm.Opcodes.*;
MH_arrayIdentity = 5,
MH_countedLoopPred = 6,
MH_countedLoopStep = 7,
MH_iteratePred = 8,
MH_initIterator = 9,
MH_initIterator = 8,
MH_iteratePred = 9,
MH_iterateNext = 10,
MH_decrementCounter = 11,
MH_Array_newInstance = 12,
MH_LIMIT = 13;
MH_Array_newInstance = 11,
MH_LIMIT = 12;

static MethodHandle getConstantHandle(int idx) {
MethodHandle handle = HANDLES[idx];
@ -2220,18 +2208,15 @@ import static jdk.internal.org.objectweb.asm.Opcodes.*;
case MH_countedLoopStep:
return IMPL_LOOKUP.findStatic(MethodHandleImpl.class, "countedLoopStep",
MethodType.methodType(int.class, int.class, int.class));
case MH_iteratePred:
return IMPL_LOOKUP.findStatic(MethodHandleImpl.class, "iteratePredicate",
MethodType.methodType(boolean.class, Iterator.class));
case MH_initIterator:
return IMPL_LOOKUP.findStatic(MethodHandleImpl.class, "initIterator",
MethodType.methodType(Iterator.class, Iterable.class));
case MH_iteratePred:
return IMPL_LOOKUP.findStatic(MethodHandleImpl.class, "iteratePredicate",
MethodType.methodType(boolean.class, Iterator.class));
case MH_iterateNext:
return IMPL_LOOKUP.findStatic(MethodHandleImpl.class, "iterateNext",
MethodType.methodType(Object.class, Iterator.class));
case MH_decrementCounter:
return IMPL_LOOKUP.findStatic(MethodHandleImpl.class, "decrementCounter",
MethodType.methodType(int.class, int.class));
case MH_Array_newInstance:
return IMPL_LOOKUP.findStatic(Array.class, "newInstance",
MethodType.methodType(Object.class, Class.class, int.class));

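The hunks above flip the internal countedLoop helpers from (counter, limit) to (limit, counter) and renumber the lazily built constant-handle table accordingly; each case still resolves its helper with a findStatic lookup against an explicit MethodType. The same lookup pattern with public API only (Math.max is just a convenient stand-in target, not part of this change):

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

public class LookupDemo {
    public static void main(String[] args) throws Throwable {
        // Mirrors the IMPL_LOOKUP.findStatic(Owner.class, "name", type) calls seen above.
        MethodHandle max = MethodHandles.lookup().findStatic(
                Math.class, "max",
                MethodType.methodType(int.class, int.class, int.class));   // (int,int)int
        int r = (int) max.invokeExact(3, 7);
        System.out.println(r);   // 7
    }
}
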
File diff suppressed because it is too large
@ -809,6 +809,28 @@ class MethodType implements java.io.Serializable {
return sj.toString();
}

/** True if my parameter list is effectively identical to the given full list,
* after skipping the given number of my own initial parameters.
* In other words, after disregarding {@code skipPos} parameters,
* my remaining parameter list is no longer than the {@code fullList}, and
* is equal to the same-length initial sublist of {@code fullList}.
*/
/*non-public*/
boolean effectivelyIdenticalParameters(int skipPos, List<Class<?>> fullList) {
int myLen = ptypes.length, fullLen = fullList.size();
if (skipPos > myLen || myLen - skipPos > fullLen)
return false;
List<Class<?>> myList = Arrays.asList(ptypes);
if (skipPos != 0) {
myList = myList.subList(skipPos, myLen);
myLen -= skipPos;
}
if (fullLen == myLen)
return myList.equals(fullList);
else
return myList.equals(fullList.subList(0, myLen));
}

/** True if the old return type can always be viewed (w/o casting) under new return type,
* and the new parameters can be viewed (w/o casting) under the old parameter types.
*/

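effectivelyIdenticalParameters is package-private, but its contract can be restated with public MethodType methods: drop the first skipPos parameters and require what remains to equal a leading sublist of fullList. A hedged equivalent using only public API (prefixMatches is a made-up name, not part of the JDK):

import java.lang.invoke.MethodType;
import java.util.List;

public class ParamPrefixCheck {
    static boolean prefixMatches(MethodType type, int skipPos, List<Class<?>> fullList) {
        List<Class<?>> mine = type.parameterList();              // immutable view of parameter types
        if (skipPos > mine.size()) {
            return false;
        }
        List<Class<?>> rest = mine.subList(skipPos, mine.size());
        return rest.size() <= fullList.size()
                && rest.equals(fullList.subList(0, rest.size()));
    }

    public static void main(String[] args) {
        MethodType t = MethodType.methodType(void.class, int.class, String.class, long.class);
        // Skipping the int leaves (String, long), a prefix of the given list, so this prints true.
        System.out.println(prefixMatches(t, 1, List.of(String.class, long.class, double.class)));
    }
}
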
@ -93,23 +93,19 @@ class MulticastSocket extends DatagramSocket {
/**
* Create a multicast socket.
*
* <p>If there is a security manager,
* its {@code checkListen} method is first called
* with 0 as its argument to ensure the operation is allowed.
* This could result in a SecurityException.
* <p>
* If there is a security manager, its {@code checkListen} method is first
* called with 0 as its argument to ensure the operation is allowed. This
* could result in a SecurityException.
* <p>
* When the socket is created the
* {@link DatagramSocket#setReuseAddress(boolean)} method is
* called to enable the SO_REUSEADDR socket option. When
* {@link StandardSocketOptions#SO_REUSEPORT SO_REUSEPORT} is
* supported then
* {@link DatagramSocketImpl#setOption(SocketOption, Object)}
* is called to enable the socket option.
* {@link DatagramSocket#setReuseAddress(boolean)} method is called to
* enable the SO_REUSEADDR socket option.
*
* @exception IOException if an I/O exception occurs
* while creating the MulticastSocket
* @exception SecurityException if a security manager exists and its
* {@code checkListen} method doesn't allow the operation.
* @exception IOException if an I/O exception occurs while creating the
* MulticastSocket
* @exception SecurityException if a security manager exists and its
* {@code checkListen} method doesn't allow the operation.
* @see SecurityManager#checkListen
* @see java.net.DatagramSocket#setReuseAddress(boolean)
* @see java.net.DatagramSocketImpl#setOption(SocketOption, Object)
@ -174,17 +170,13 @@ class MulticastSocket extends DatagramSocket {
// Enable SO_REUSEADDR before binding
setReuseAddress(true);

// Enable SO_REUSEPORT if supported before binding
if (supportedOptions().contains(StandardSocketOptions.SO_REUSEPORT)) {
this.setOption(StandardSocketOptions.SO_REUSEPORT, true);
}

if (bindaddr != null) {
try {
bind(bindaddr);
} finally {
if (!isBound())
if (!isBound()) {
close();
}
}
}
}

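The constructor hunk removes the automatic SO_REUSEPORT enabling shown in the deleted lines, and the javadoc is trimmed to mention only SO_REUSEADDR. Code that still wants SO_REUSEPORT where the platform offers it can opt in itself; a sketch against the JDK 9 socket-option API (port 4446 is an arbitrary example):

import java.net.InetSocketAddress;
import java.net.MulticastSocket;
import java.net.StandardSocketOptions;

public class ReusePortOptIn {
    public static void main(String[] args) throws Exception {
        try (MulticastSocket socket = new MulticastSocket(null)) {   // unbound; SO_REUSEADDR already enabled
            boolean reusePort = socket.supportedOptions()
                    .contains(StandardSocketOptions.SO_REUSEPORT);
            if (reusePort) {
                socket.setOption(StandardSocketOptions.SO_REUSEPORT, true);   // opt in before binding
            }
            socket.bind(new InetSocketAddress(4446));
            System.out.println("SO_REUSEPORT enabled: " + reusePort);
        }
    }
}
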
@ -1,5 +1,5 @@
/*
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1996, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -968,7 +968,7 @@ public class DecimalFormat extends NumberFormat {
* Decimal : min = 0. max = 3.
*
*/
private void checkAndSetFastPathStatus() {
private boolean checkAndSetFastPathStatus() {

boolean fastPathWasOn = isFastPath;

@ -998,12 +998,27 @@ public class DecimalFormat extends NumberFormat {
} else
isFastPath = false;

resetFastPathData(fastPathWasOn);
fastPathCheckNeeded = false;

/*
* Returns true after successfully checking the fast path condition and
* setting the fast path data. The return value is used by the
* fastFormat() method to decide whether to call the resetFastPathData
* method to reinitialize fast path data or is it already initialized
* in this method.
*/
return true;
}

private void resetFastPathData(boolean fastPathWasOn) {
// Since some instance properties may have changed while still falling
// in the fast-path case, we need to reinitialize fastPathData anyway.
if (isFastPath) {
// We need to instantiate fastPathData if not already done.
if (fastPathData == null)
if (fastPathData == null) {
fastPathData = new FastPathData();
}

// Sets up the locale specific constants used when formatting.
// '0' is our default representation of zero.
@ -1011,22 +1026,27 @@ public class DecimalFormat extends NumberFormat {
fastPathData.groupingChar = symbols.getGroupingSeparator();

// Sets up fractional constants related to currency/decimal pattern.
fastPathData.fractionalMaxIntBound = (isCurrencyFormat) ? 99 : 999;
fastPathData.fractionalScaleFactor = (isCurrencyFormat) ? 100.0d : 1000.0d;
fastPathData.fractionalMaxIntBound = (isCurrencyFormat)
? 99 : 999;
fastPathData.fractionalScaleFactor = (isCurrencyFormat)
? 100.0d : 1000.0d;

// Records the need for adding prefix or suffix
fastPathData.positiveAffixesRequired =
(positivePrefix.length() != 0) || (positiveSuffix.length() != 0);
fastPathData.negativeAffixesRequired =
(negativePrefix.length() != 0) || (negativeSuffix.length() != 0);
fastPathData.positiveAffixesRequired
= (positivePrefix.length() != 0)
|| (positiveSuffix.length() != 0);
fastPathData.negativeAffixesRequired
= (negativePrefix.length() != 0)
|| (negativeSuffix.length() != 0);

// Creates a cached char container for result, with max possible size.
int maxNbIntegralDigits = 10;
int maxNbGroups = 3;
int containerSize =
Math.max(positivePrefix.length(), negativePrefix.length()) +
maxNbIntegralDigits + maxNbGroups + 1 + maximumFractionDigits +
Math.max(positiveSuffix.length(), negativeSuffix.length());
int containerSize
= Math.max(positivePrefix.length(), negativePrefix.length())
+ maxNbIntegralDigits + maxNbGroups + 1
+ maximumFractionDigits
+ Math.max(positiveSuffix.length(), negativeSuffix.length());

fastPathData.fastPathContainer = new char[containerSize];

@ -1038,17 +1058,18 @@ public class DecimalFormat extends NumberFormat {

// Sets up fixed index positions for integral and fractional digits.
// Sets up decimal point in cached result container.
int longestPrefixLength =
Math.max(positivePrefix.length(), negativePrefix.length());
int decimalPointIndex =
maxNbIntegralDigits + maxNbGroups + longestPrefixLength;
int longestPrefixLength
= Math.max(positivePrefix.length(),
negativePrefix.length());
int decimalPointIndex
= maxNbIntegralDigits + maxNbGroups + longestPrefixLength;

fastPathData.integralLastIndex = decimalPointIndex - 1;
fastPathData.integralLastIndex = decimalPointIndex - 1;
fastPathData.fractionalFirstIndex = decimalPointIndex + 1;
fastPathData.fastPathContainer[decimalPointIndex] =
isCurrencyFormat ?
symbols.getMonetaryDecimalSeparator() :
symbols.getDecimalSeparator();
fastPathData.fastPathContainer[decimalPointIndex]
= isCurrencyFormat
? symbols.getMonetaryDecimalSeparator()
: symbols.getDecimalSeparator();

} else if (fastPathWasOn) {
// Previous state was fast-path and is no more.
@ -1059,8 +1080,6 @@ public class DecimalFormat extends NumberFormat {
fastPathData.charsPositivePrefix = null;
fastPathData.charsNegativePrefix = null;
}

fastPathCheckNeeded = false;
}

/**
@ -1554,9 +1573,11 @@ public class DecimalFormat extends NumberFormat {
* @return the formatted result for {@code d} as a string.
*/
String fastFormat(double d) {
boolean isDataSet = false;
// (Re-)Evaluates fast-path status if needed.
if (fastPathCheckNeeded)
checkAndSetFastPathStatus();
if (fastPathCheckNeeded) {
isDataSet = checkAndSetFastPathStatus();
}

if (!isFastPath )
// DecimalFormat instance is not in a fast-path state.
@ -1580,9 +1601,21 @@ public class DecimalFormat extends NumberFormat {
if (d > MAX_INT_AS_DOUBLE)
// Filters out values that are outside expected fast-path range
return null;
else
else {
if (!isDataSet) {
/*
* If the fast path data is not set through
* checkAndSetFastPathStatus() and fulfil the
* fast path conditions then reset the data
* directly through resetFastPathData()
*/
resetFastPathData(isFastPath);
}
fastDoubleFormat(d, negative);

}

// Returns a new string from updated fastPathContainer.
return new String(fastPathData.fastPathContainer,
fastPathData.firstUsedIndex,

|
||||
int lastRet = -1; // index of last element returned; -1 if no such
|
||||
int expectedModCount = modCount;
|
||||
|
||||
Itr() {}
|
||||
|
||||
public boolean hasNext() {
|
||||
return cursor != size;
|
||||
}
|
||||
|
@ -1027,7 +1027,7 @@ public final class Locale implements Cloneable, Serializable {
|
||||
* not contain ALL valid codes that can be used to create Locales.
|
||||
* </ul>
|
||||
*
|
||||
* @return Am array of ISO 639 two-letter language codes.
|
||||
* @return An array of ISO 639 two-letter language codes.
|
||||
*/
|
||||
public static String[] getISOLanguages() {
|
||||
if (isoLanguages == null) {
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (c) 2003, 2015, Oracle and/or its affiliates. All rights reserved.
|
||||
* Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
@ -182,14 +182,15 @@ public abstract class RSASignature extends SignatureSpi {
|
||||
}
|
||||
|
||||
// verify the data and return the result. See JCA doc
|
||||
// should be reset to the state after engineInitVerify call.
|
||||
protected boolean engineVerify(byte[] sigBytes) throws SignatureException {
|
||||
if (sigBytes.length != RSACore.getByteLength(publicKey)) {
|
||||
throw new SignatureException("Signature length not correct: got " +
|
||||
try {
|
||||
if (sigBytes.length != RSACore.getByteLength(publicKey)) {
|
||||
throw new SignatureException("Signature length not correct: got " +
|
||||
sigBytes.length + " but was expecting " +
|
||||
RSACore.getByteLength(publicKey));
|
||||
}
|
||||
byte[] digest = getDigestValue();
|
||||
try {
|
||||
}
|
||||
byte[] digest = getDigestValue();
|
||||
byte[] decrypted = RSACore.rsa(sigBytes, publicKey);
|
||||
byte[] unpadded = padding.unpad(decrypted);
|
||||
byte[] decodedDigest = decodeSignature(digestOID, unpadded);
|
||||
@ -202,6 +203,8 @@ public abstract class RSASignature extends SignatureSpi {
|
||||
return false;
|
||||
} catch (IOException e) {
|
||||
throw new SignatureException("Signature encoding error", e);
|
||||
} finally {
|
||||
resetDigest();
|
||||
}
|
||||
}
|
||||
|
||||
|
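The engineVerify change moves the length check inside the same try so that the finally block's resetDigest() runs however verification exits, matching the comment that the engine should be back in its post-engineInitVerify state. From the caller's side this is the ordinary Signature flow; a generic sketch (algorithm choice and key generation are illustrative, and no particular provider is assumed):

import java.nio.charset.StandardCharsets;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.Signature;

public class VerifyDemo {
    public static void main(String[] args) throws Exception {
        KeyPair kp = KeyPairGenerator.getInstance("RSA").generateKeyPair();
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);

        Signature signer = Signature.getInstance("SHA256withRSA");
        signer.initSign(kp.getPrivate());
        signer.update(data);
        byte[] sig = signer.sign();

        Signature verifier = Signature.getInstance("SHA256withRSA");
        verifier.initVerify(kp.getPublic());
        verifier.update(data);
        System.out.println(verifier.verify(sig));   // true
        // verify() resets the engine to its post-initVerify state,
        // so the same instance can be reused for another update/verify round.
        verifier.update(data);
        System.out.println(verifier.verify(sig));   // true again
    }
}
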
Some files were not shown because too many files have changed in this diff