8218751: Do not store original classfiles inside the CDS archive

Remove the OD shared region and decode classfiles on the fly.

Reviewed-by: jiangli, ccheung, sspitsyn, redestad

parent: b7ed42eedd
commit: d06f3e7e28
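In short: the original classfile bytes are no longer copied into a separate "od" region of the CDS archive at dump time. If a JVMTI ClassFileLoadHook needs the bytes of a shared class at runtime, they are re-read ("decoded on the fly") from the original jar or jrt image recorded in the shared path table. A condensed sketch of the new runtime path, pieced together from the KlassFactory::check_shared_class_file_load_hook hunk below (not a verbatim quote of the sources):

    // Inside check_shared_class_file_load_hook(), once a CFLH is registered:
    if (cfs == NULL) {
      // No stream supplied by the caller: re-open the classfile from its
      // original jar/jrt entry instead of reading it out of the archive.
      cfs = FileMapInfo::open_stream_for_jvmti(ik, CHECK_NULL);
    }
    unsigned char* ptr     = (unsigned char*)cfs->buffer();
    unsigned char* end_ptr = ptr + cfs->length();
    JvmtiExport::post_class_file_load_hook(class_name, class_loader, ...);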
@@ -247,12 +247,12 @@ class ClassLoader: AllStatic {
   static void load_zip_library();
   static void load_jimage_library();
+ public:
   static ClassPathEntry* create_class_path_entry(const char *path, const struct stat* st,
                                                  bool throw_exception,
                                                  bool is_boot_append, TRAPS);
- public:

   // If the package for the fully qualified class name is in the boot
   // loader's package entry table then add_package() sets the classpath_index
   // field so that get_system_package() will know to return a non-null value
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -46,22 +46,22 @@ InstanceKlass* KlassFactory::check_shared_class_file_load_hook(
                                         InstanceKlass* ik,
                                         Symbol* class_name,
                                         Handle class_loader,
-                                        Handle protection_domain, TRAPS) {
+                                        Handle protection_domain,
+                                        const ClassFileStream *cfs,
+                                        TRAPS) {
 #if INCLUDE_CDS && INCLUDE_JVMTI
   assert(ik != NULL, "sanity");
   assert(ik->is_shared(), "expecting a shared class");

   if (JvmtiExport::should_post_class_file_load_hook()) {
     assert(THREAD->is_Java_thread(), "must be JavaThread");

     // Post the CFLH
     JvmtiCachedClassFileData* cached_class_file = NULL;
-    JvmtiCachedClassFileData* archived_class_data = ik->get_archived_class_data();
-    assert(archived_class_data != NULL, "shared class has no archived class data");
-    unsigned char* ptr =
-        VM_RedefineClasses::get_cached_class_file_bytes(archived_class_data);
-    unsigned char* end_ptr =
-        ptr + VM_RedefineClasses::get_cached_class_file_len(archived_class_data);
+    if (cfs == NULL) {
+      cfs = FileMapInfo::open_stream_for_jvmti(ik, CHECK_NULL);
+    }
+    unsigned char* ptr = (unsigned char*)cfs->buffer();
+    unsigned char* end_ptr = ptr + cfs->length();
     unsigned char* old_ptr = ptr;
     JvmtiExport::post_class_file_load_hook(class_name,
                                            class_loader,
@@ -75,25 +75,9 @@ InstanceKlass* KlassFactory::check_shared_class_file_load_hook(
     ClassLoaderData* loader_data =
       ClassLoaderData::class_loader_data(class_loader());
-    int path_index = ik->shared_classpath_index();
-    const char* pathname;
-    if (path_index < 0) {
-      // shared classes loaded by user defined class loader
-      // do not have shared_classpath_index
-      ModuleEntry* mod_entry = ik->module();
-      if (mod_entry != NULL && (mod_entry->location() != NULL)) {
-        ResourceMark rm;
-        pathname = (const char*)(mod_entry->location()->as_C_string());
-      } else {
-        pathname = "";
-      }
-    } else {
-      SharedClassPathEntry* ent =
-        (SharedClassPathEntry*)FileMapInfo::shared_path(path_index);
-      pathname = ent == NULL ? NULL : ent->name();
-    }
     ClassFileStream* stream = new ClassFileStream(ptr,
                                                   end_ptr - ptr,
-                                                  pathname,
+                                                  cfs->source(),
                                                   ClassFileStream::verify);
     ClassFileParser parser(stream,
                            class_name,
@@ -236,24 +220,6 @@ InstanceKlass* KlassFactory::create_from_stream(ClassFileStream* stream,
 #if INCLUDE_CDS
   if (DumpSharedSpaces) {
     ClassLoader::record_result(result, stream, THREAD);
-#if INCLUDE_JVMTI
-    assert(cached_class_file == NULL, "Sanity");
-    // Archive the class stream data into the optional data section
-    JvmtiCachedClassFileData *p;
-    int len;
-    const unsigned char *bytes;
-    // event based tracing might set cached_class_file
-    if ((bytes = result->get_cached_class_file_bytes()) != NULL) {
-      len = result->get_cached_class_file_len();
-    } else {
-      len = stream->length();
-      bytes = stream->buffer();
-    }
-    p = (JvmtiCachedClassFileData*)os::malloc(offset_of(JvmtiCachedClassFileData, data) + len, mtInternal);
-    p->length = len;
-    memcpy(p->data, bytes, len);
-    result->set_archived_class_data(p);
-#endif // INCLUDE_JVMTI
   }
 #endif // INCLUDE_CDS
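The block removed above was the dump-time half of the old scheme: it copied the raw class bytes into a length-prefixed JvmtiCachedClassFileData blob, which relocate_cached_class_file() (deleted further down) later moved into the od region. For readers unfamiliar with the offset_of(..., data) + len allocation idiom, here is a self-contained sketch of the same pattern in plain C++ (illustrative only; just the length/data field names are taken from the diff):

    #include <cstddef>   // offsetof
    #include <cstdlib>   // malloc, free
    #include <cstring>   // memcpy

    struct CachedClassFileBlob {
      int length;
      unsigned char data[1];   // payload bytes follow the header in the same allocation
    };

    CachedClassFileBlob* make_blob(const unsigned char* bytes, int len) {
      // One allocation holds the fixed header plus 'len' payload bytes.
      CachedClassFileBlob* p =
          (CachedClassFileBlob*)std::malloc(offsetof(CachedClassFileBlob, data) + len);
      p->length = len;
      std::memcpy(p->data, bytes, len);
      return p;                // caller releases it with std::free(p)
    }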
@@ -80,7 +80,9 @@ class KlassFactory : AllStatic {
                                         InstanceKlass* ik,
                                         Symbol* class_name,
                                         Handle class_loader,
-                                        Handle protection_domain, TRAPS);
+                                        Handle protection_domain,
+                                        const ClassFileStream *cfs,
+                                        TRAPS);
 };

 #endif // SHARE_CLASSFILE_KLASSFACTORY_HPP
@@ -1174,7 +1174,7 @@ InstanceKlass* SystemDictionary::load_shared_boot_class(Symbol* class_name,
                                                         TRAPS) {
   InstanceKlass* ik = SystemDictionaryShared::find_builtin_class(class_name);
   if (ik != NULL && ik->is_shared_boot_class()) {
-    return load_shared_class(ik, Handle(), Handle(), THREAD);
+    return load_shared_class(ik, Handle(), Handle(), NULL, THREAD);
   }
   return NULL;
 }
@@ -1274,7 +1274,9 @@ bool SystemDictionary::is_shared_class_visible(Symbol* class_name,

 InstanceKlass* SystemDictionary::load_shared_class(InstanceKlass* ik,
                                                    Handle class_loader,
-                                                   Handle protection_domain, TRAPS) {
+                                                   Handle protection_domain,
+                                                   const ClassFileStream *cfs,
+                                                   TRAPS) {

   if (ik != NULL) {
     Symbol* class_name = ik->name();
@@ -1321,7 +1323,7 @@ InstanceKlass* SystemDictionary::load_shared_class(InstanceKlass* ik,
   }

   InstanceKlass* new_ik = KlassFactory::check_shared_class_file_load_hook(
-    ik, class_name, class_loader, protection_domain, CHECK_NULL);
+    ik, class_name, class_loader, protection_domain, cfs, CHECK_NULL);
   if (new_ik != NULL) {
     // The class is changed by CFLH. Return the new class. The shared class is
     // not used.
@@ -628,6 +628,7 @@ protected:
   static InstanceKlass* load_shared_class(InstanceKlass* ik,
                                           Handle class_loader,
                                           Handle protection_domain,
+                                          const ClassFileStream *cfs,
                                           TRAPS);
   static InstanceKlass* load_shared_boot_class(Symbol* class_name,
                                                TRAPS);
@@ -803,7 +803,7 @@ InstanceKlass* SystemDictionaryShared::load_shared_class_for_builtin_loader(
        SystemDictionary::is_platform_class_loader(class_loader()))) {
      Handle protection_domain =
        SystemDictionaryShared::init_security_info(class_loader, ik, CHECK_NULL);
-     return load_shared_class(ik, class_loader, protection_domain, THREAD);
+     return load_shared_class(ik, class_loader, protection_domain, NULL, THREAD);
    }
  }
  return NULL;
@@ -873,13 +873,15 @@ InstanceKlass* SystemDictionaryShared::lookup_from_stream(Symbol* class_name,
   }

   return acquire_class_for_current_thread(record->_klass, class_loader,
-                                          protection_domain, THREAD);
+                                          protection_domain, cfs,
+                                          THREAD);
 }

 InstanceKlass* SystemDictionaryShared::acquire_class_for_current_thread(
                    InstanceKlass *ik,
                    Handle class_loader,
                    Handle protection_domain,
+                   const ClassFileStream *cfs,
                    TRAPS) {
   ClassLoaderData* loader_data = ClassLoaderData::class_loader_data(class_loader());

@@ -900,7 +902,8 @@ InstanceKlass* SystemDictionaryShared::acquire_class_for_current_thread(
   loader_data->add_class(ik);

   // Load and check super/interfaces, restore unsharable info
-  InstanceKlass* shared_klass = load_shared_class(ik, class_loader, protection_domain, THREAD);
+  InstanceKlass* shared_klass = load_shared_class(ik, class_loader, protection_domain,
+                                                  cfs, THREAD);
   if (shared_klass == NULL || HAS_PENDING_EXCEPTION) {
     // TODO: clean up <ik> so it can be used again
     return NULL;
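Note how the new cfs parameter is threaded through: SystemDictionaryShared::lookup_from_stream() already holds the caller's stream and passes it down, while the boot and builtin-loader paths pass NULL, so the classfile is only re-opened if a ClassFileLoadHook is actually installed. A toy sketch of that "optional stream, NULL means open on demand" shape in standard C++ (illustrative only, not HotSpot code):

    #include <fstream>
    #include <iostream>
    #include <memory>

    void post_hook(const char* path, std::istream* in = nullptr) {
      std::unique_ptr<std::istream> owned;
      if (in == nullptr) {                     // caller had no stream handy
        owned.reset(new std::ifstream(path));  // open the original file on demand
        in = owned.get();
      }
      std::cout << "first byte: " << in->get() << "\n";
    }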
@@ -207,6 +207,7 @@ private:
                                InstanceKlass *ik,
                                Handle class_loader,
                                Handle protection_domain,
+                               const ClassFileStream* cfs,
                                TRAPS);
   static DumpTimeSharedClassInfo* find_or_allocate_info_for(InstanceKlass* k);
   static void write_dictionary(RunTimeSharedDictionary* dictionary, bool is_builtin);
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -45,6 +45,7 @@
 #include "prims/jvmtiExport.hpp"
 #include "runtime/arguments.hpp"
 #include "runtime/java.hpp"
+#include "runtime/mutexLocker.hpp"
 #include "runtime/os.inline.hpp"
 #include "runtime/vm_version.hpp"
 #include "services/memTracker.hpp"
@@ -501,6 +502,16 @@ bool FileMapInfo::validate_shared_path_table() {
   }

   _validating_shared_path_table = false;

+#if INCLUDE_JVMTI
+  if (_classpath_entries_for_jvmti != NULL) {
+    os::free(_classpath_entries_for_jvmti);
+  }
+  size_t sz = sizeof(ClassPathEntry*) * _shared_path_table_size;
+  _classpath_entries_for_jvmti = (ClassPathEntry**)os::malloc(sz, mtClass);
+  memset(_classpath_entries_for_jvmti, 0, sz);
+#endif
+
   return true;
 }
@@ -1440,3 +1451,57 @@ void FileMapInfo::stop_sharing_and_unmap(const char* msg) {
     fail_stop("%s", msg);
   }
 }
+
+#if INCLUDE_JVMTI
+ClassPathEntry** FileMapInfo::_classpath_entries_for_jvmti = NULL;
+
+ClassPathEntry* FileMapInfo::get_classpath_entry_for_jvmti(int i, TRAPS) {
+  ClassPathEntry* ent = _classpath_entries_for_jvmti[i];
+  if (ent == NULL) {
+    if (i == 0) {
+      ent = ClassLoader::get_jrt_entry();
+      assert(ent != NULL, "must be");
+    } else {
+      SharedClassPathEntry* scpe = shared_path(i);
+      assert(scpe->is_jar(), "must be"); // other types of scpe will not produce archived classes
+
+      const char* path = scpe->name();
+      struct stat st;
+      if (os::stat(path, &st) != 0) {
+        char *msg = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, strlen(path) + 128);
+        jio_snprintf(msg, strlen(path) + 127, "error in opening JAR file %s", path);
+        THROW_MSG_(vmSymbols::java_io_IOException(), msg, NULL);
+      } else {
+        ent = ClassLoader::create_class_path_entry(path, &st, /*throw_exception=*/true, false, CHECK_NULL);
+      }
+    }
+
+    MutexLocker mu(CDSClassFileStream_lock, THREAD);
+    if (_classpath_entries_for_jvmti[i] == NULL) {
+      _classpath_entries_for_jvmti[i] = ent;
+    } else {
+      // Another thread has beat me to creating this entry
+      delete ent;
+      ent = _classpath_entries_for_jvmti[i];
+    }
+  }
+
+  return ent;
+}
+
+ClassFileStream* FileMapInfo::open_stream_for_jvmti(InstanceKlass* ik, TRAPS) {
+  int path_index = ik->shared_classpath_index();
+  assert(path_index >= 0, "should be called for shared built-in classes only");
+  assert(path_index < (int)_shared_path_table_size, "sanity");
+
+  ClassPathEntry* cpe = get_classpath_entry_for_jvmti(path_index, CHECK_NULL);
+  assert(cpe != NULL, "must be");
+
+  Symbol* name = ik->name();
+  const char* const class_name = name->as_C_string();
+  const char* const file_name = ClassLoader::file_name_for_class_name(class_name,
+                                                                      name->utf8_length());
+  return cpe->open_stream(file_name, THREAD);
+}
+
+#endif
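get_classpath_entry_for_jvmti() above follows a create-then-publish pattern: the ClassPathEntry is built without holding any lock, and CDSClassFileStream_lock only guards the brief installation step; a thread that loses the race deletes its duplicate. A minimal self-contained sketch of that pattern using standard C++ primitives instead of HotSpot's MutexLocker (illustrative only):

    #include <atomic>
    #include <mutex>

    struct Entry { int id; };

    static std::atomic<Entry*> g_entries[16];   // lazily filled slots, initially null
    static std::mutex          g_publish_lock;

    Entry* get_entry(int i) {
      Entry* e = g_entries[i].load(std::memory_order_acquire);
      if (e == nullptr) {
        Entry* fresh = new Entry{i};            // potentially slow work, done outside the lock
        std::lock_guard<std::mutex> guard(g_publish_lock);
        e = g_entries[i].load(std::memory_order_relaxed);
        if (e == nullptr) {
          g_entries[i].store(fresh, std::memory_order_release);
          e = fresh;                            // we won the race: publish our entry
        } else {
          delete fresh;                         // another thread beat us: drop the duplicate
        }
      }
      return e;
    }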
@@ -302,6 +302,10 @@ public:
   bool validate_shared_path_table();
   static void update_shared_classpath(ClassPathEntry *cpe, SharedClassPathEntry* ent, TRAPS);

+#if INCLUDE_JVMTI
+  static ClassFileStream* open_stream_for_jvmti(InstanceKlass* ik, TRAPS);
+#endif
+
   static SharedClassPathEntry* shared_path(int index) {
     if (index < 0) {
       return NULL;
@@ -348,6 +352,11 @@ public:
   }

   address decode_start_address(CDSFileMapRegion* spc, bool with_current_oop_encoding_mode);

+#if INCLUDE_JVMTI
+  static ClassPathEntry** _classpath_entries_for_jvmti;
+  static ClassPathEntry* get_classpath_entry_for_jvmti(int i, TRAPS);
+#endif
 };

 #endif // SHARE_MEMORY_FILEMAP_HPP
@@ -207,7 +207,7 @@ public:
 };


-DumpRegion _mc_region("mc"), _ro_region("ro"), _rw_region("rw"), _md_region("md"), _od_region("od");
+DumpRegion _mc_region("mc"), _ro_region("ro"), _rw_region("rw"), _md_region("md");
 size_t _total_closed_archive_region_size = 0, _total_open_archive_region_size = 0;

 char* MetaspaceShared::misc_code_space_alloc(size_t num_bytes) {
@@ -598,23 +598,6 @@ static void rewrite_nofast_bytecodes_and_calculate_fingerprints() {
   }
 }

-static void relocate_cached_class_file() {
-  for (int i = 0; i < _global_klass_objects->length(); i++) {
-    Klass* k = _global_klass_objects->at(i);
-    if (k->is_instance_klass()) {
-      InstanceKlass* ik = InstanceKlass::cast(k);
-      JvmtiCachedClassFileData* p = ik->get_archived_class_data();
-      if (p != NULL) {
-        int size = offset_of(JvmtiCachedClassFileData, data) + p->length;
-        JvmtiCachedClassFileData* q = (JvmtiCachedClassFileData*)_od_region.allocate(size);
-        q->length = p->length;
-        memcpy(q->data, p->data, p->length);
-        ik->set_archived_class_data(q);
-      }
-    }
-  }
-}
-
 // Objects of the Metadata types (such as Klass and ConstantPool) have C++ vtables.
 // (In GCC this is the field <Type>::_vptr, i.e., first word in the object.)
 //
@@ -1438,15 +1421,11 @@ void VM_PopulateDumpSharedSpace::doit() {
   char* vtbl_list = _md_region.top();
   MetaspaceShared::allocate_cpp_vtable_clones();
-  _md_region.pack(&_od_region);
+  _md_region.pack();

-  // Relocate the archived class file data into the od region
-  relocate_cached_class_file();
-  _od_region.pack();
-
-  // The 5 core spaces are allocated consecutively mc->rw->ro->md->od, so there total size
+  // The 4 core spaces are allocated consecutively mc->rw->ro->md, so there total size
   // is just the spaces between the two ends.
-  size_t core_spaces_size = _od_region.end() - _mc_region.base();
+  size_t core_spaces_size = _md_region.end() - _mc_region.base();
   assert(core_spaces_size == (size_t)align_up(core_spaces_size, Metaspace::reserve_alignment()),
          "should already be aligned");
@@ -1488,7 +1467,6 @@ void VM_PopulateDumpSharedSpace::doit() {
   write_region(mapinfo, MetaspaceShared::rw, &_rw_region, /*read_only=*/false,/*allow_exec=*/false);
   write_region(mapinfo, MetaspaceShared::ro, &_ro_region, /*read_only=*/true, /*allow_exec=*/false);
   write_region(mapinfo, MetaspaceShared::md, &_md_region, /*read_only=*/false,/*allow_exec=*/false);
-  write_region(mapinfo, MetaspaceShared::od, &_od_region, /*read_only=*/true, /*allow_exec=*/false);

   _total_closed_archive_region_size = mapinfo->write_archive_heap_regions(
                                         _closed_archive_heap_regions,
@@ -1535,12 +1513,10 @@ void VM_PopulateDumpSharedSpace::print_region_stats() {
   // Print statistics of all the regions
   const size_t total_reserved = _ro_region.reserved() + _rw_region.reserved() +
                                 _mc_region.reserved() + _md_region.reserved() +
-                                _od_region.reserved() +
                                 _total_closed_archive_region_size +
                                 _total_open_archive_region_size;
   const size_t total_bytes = _ro_region.used() + _rw_region.used() +
                              _mc_region.used() + _md_region.used() +
-                             _od_region.used() +
                              _total_closed_archive_region_size +
                              _total_open_archive_region_size;
   const double total_u_perc = percent_of(total_bytes, total_reserved);
@@ -1549,7 +1525,6 @@ void VM_PopulateDumpSharedSpace::print_region_stats() {
   _rw_region.print(total_reserved);
   _ro_region.print(total_reserved);
   _md_region.print(total_reserved);
-  _od_region.print(total_reserved);
   print_heap_region_stats(_closed_archive_heap_regions, "ca", total_reserved);
   print_heap_region_stats(_open_archive_heap_regions, "oa", total_reserved);
@@ -1931,33 +1906,30 @@ bool MetaspaceShared::map_shared_spaces(FileMapInfo* mapinfo) {
   char* rw_base = NULL; char* rw_top;
   char* mc_base = NULL; char* mc_top;
   char* md_base = NULL; char* md_top;
-  char* od_base = NULL; char* od_top;

   // Map each shared region
   if ((mc_base = mapinfo->map_region(mc, &mc_top)) != NULL &&
       (rw_base = mapinfo->map_region(rw, &rw_top)) != NULL &&
       (ro_base = mapinfo->map_region(ro, &ro_top)) != NULL &&
       (md_base = mapinfo->map_region(md, &md_top)) != NULL &&
-      (od_base = mapinfo->map_region(od, &od_top)) != NULL &&
       (image_alignment == (size_t)os::vm_allocation_granularity()) &&
       mapinfo->validate_shared_path_table()) {
     // Success -- set up MetaspaceObj::_shared_metaspace_{base,top} for
     // fast checking in MetaspaceShared::is_in_shared_metaspace() and
     // MetaspaceObj::is_shared().
     //
-    // We require that mc->rw->ro->md->od to be laid out consecutively, with no
+    // We require that mc->rw->ro->md to be laid out consecutively, with no
     // gaps between them. That way, we can ensure that the OS won't be able to
     // allocate any new memory spaces inside _shared_metaspace_{base,top}, which
     // would mess up the simple comparision in MetaspaceShared::is_in_shared_metaspace().
-    assert(mc_base < ro_base && mc_base < rw_base && mc_base < md_base && mc_base < od_base, "must be");
-    assert(od_top > ro_top && od_top > rw_top && od_top > md_top && od_top > mc_top , "must be");
+    assert(mc_base < ro_base && mc_base < rw_base && mc_base < md_base, "must be");
+    assert(md_top > ro_top && md_top > rw_top && md_top > mc_top , "must be");
     assert(mc_top == rw_base, "must be");
     assert(rw_top == ro_base, "must be");
     assert(ro_top == md_base, "must be");
-    assert(md_top == od_base, "must be");

     _core_spaces_size = mapinfo->core_spaces_size();
-    MetaspaceObj::set_shared_metaspace_range((void*)mc_base, (void*)od_top);
+    MetaspaceObj::set_shared_metaspace_range((void*)mc_base, (void*)md_top);
     return true;
   } else {
     // If there was a failure in mapping any of the spaces, unmap the ones
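The asserts above are what keep MetaspaceShared::is_in_shared_metaspace() cheap: with the four core regions mapped back to back, "is this pointer shared metadata?" reduces to two pointer comparisons against a single base/top pair. A minimal sketch of that idea (illustrative only; the struct and names are made up, not HotSpot's):

    struct SharedRange {
      char* base;   // start of the first region (mc)
      char* top;    // end of the last region (md)
      bool contains(const void* p) const {
        return base <= (const char*)p && (const char*)p < top;
      }
    };

    // With mc|rw|ro|md laid out consecutively (mc_top == rw_base, rw_top == ro_base,
    // ro_top == md_base), one contains() check covers all four regions.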
@@ -1966,7 +1938,6 @@ bool MetaspaceShared::map_shared_spaces(FileMapInfo* mapinfo) {
     if (rw_base != NULL) mapinfo->unmap_region(rw);
     if (mc_base != NULL) mapinfo->unmap_region(mc);
     if (md_base != NULL) mapinfo->unmap_region(md);
-    if (od_base != NULL) mapinfo->unmap_region(od);
 #ifndef _WINDOWS
     // Release the entire mapped region
     shared_rs.release();
@@ -2049,7 +2020,6 @@ void MetaspaceShared::report_out_of_space(const char* name, size_t needed_bytes)
   _rw_region.print_out_of_space_msg(name, needed_bytes);
   _ro_region.print_out_of_space_msg(name, needed_bytes);
   _md_region.print_out_of_space_msg(name, needed_bytes);
-  _od_region.print_out_of_space_msg(name, needed_bytes);

   vm_exit_during_initialization(err_msg("Unable to allocate from '%s' region", name),
                                 "Please reduce the number of shared classes.");
@@ -69,14 +69,10 @@ class MetaspaceShared : AllStatic {
     ro = 2, // read-only shared space in the heap
     md = 3, // miscellaneous data for initializing tables, etc.
     num_core_spaces = 4, // number of non-string regions
-
-    // optional mapped spaces
-    // Currently it only contains class file data.
-    od = num_core_spaces,
-    num_non_heap_spaces = od + 1,
+    num_non_heap_spaces = 4,

     // mapped java heap regions
-    first_closed_archive_heap_region = od + 1,
+    first_closed_archive_heap_region = md + 1,
     max_closed_archive_heap_region = 2,
     last_closed_archive_heap_region = first_closed_archive_heap_region + max_closed_archive_heap_region - 1,
     first_open_archive_heap_region = last_closed_archive_heap_region + 1,
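Spelled out, the region numbering after this change (mc and rw are not visible in this hunk; their 0 and 1 values are assumed from the surrounding enum, so treat this as an illustrative sketch rather than a quote of the header):

    enum {
      mc = 0,                              // misc code  (assumed, not shown in the hunk)
      rw = 1,                              // read-write (assumed, not shown in the hunk)
      ro = 2,
      md = 3,
      num_core_spaces     = 4,
      num_non_heap_spaces = 4,             // the od slot between md and the heap regions is gone
      first_closed_archive_heap_region = md + 1   // heap regions shift down by one (was od + 1)
    };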
@@ -2353,6 +2353,7 @@ void InstanceKlass::remove_unshareable_info() {
 #if INCLUDE_JVMTI
   guarantee(_breakpoints == NULL, "must be");
   guarantee(_previous_versions == NULL, "must be");
+  _cached_class_file = NULL;
 #endif

   _init_thread = NULL;
@@ -2509,7 +2510,7 @@ void InstanceKlass::release_C_heap_structures() {
   }

   // deallocate the cached class file
-  if (_cached_class_file != NULL && !MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) {
+  if (_cached_class_file != NULL) {
     os::free(_cached_class_file);
     _cached_class_file = NULL;
   }
@@ -3970,12 +3971,7 @@ Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {

 #if INCLUDE_JVMTI
 JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
-  if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) {
-    // Ignore the archived class stream data
-    return NULL;
-  } else {
-    return _cached_class_file;
-  }
+  return _cached_class_file;
 }

 jint InstanceKlass::get_cached_class_file_len() {
@@ -3985,19 +3981,4 @@ jint InstanceKlass::get_cached_class_file_len() {
 unsigned char * InstanceKlass::get_cached_class_file_bytes() {
   return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
 }
-
-#if INCLUDE_CDS
-JvmtiCachedClassFileData* InstanceKlass::get_archived_class_data() {
-  if (DumpSharedSpaces) {
-    return _cached_class_file;
-  } else {
-    assert(this->is_shared(), "class should be shared");
-    if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) {
-      return _cached_class_file;
-    } else {
-      return NULL;
-    }
-  }
-}
-#endif
 #endif
@@ -847,14 +847,6 @@ public:
   JvmtiCachedClassFieldMap* jvmti_cached_class_field_map() const {
     return _jvmti_cached_class_field_map;
   }
-
-#if INCLUDE_CDS
-  void set_archived_class_data(JvmtiCachedClassFileData* data) {
-    _cached_class_file = data;
-  }
-
-  JvmtiCachedClassFileData * get_archived_class_data();
-#endif // INCLUDE_CDS
 #else // INCLUDE_JVMTI

   static void purge_previous_versions(InstanceKlass* ik) { return; };
@@ -150,6 +150,9 @@ Mutex* DCmdFactory_lock = NULL;
 #if INCLUDE_NMT
 Mutex* NMTQuery_lock = NULL;
 #endif
+#if INCLUDE_CDS && INCLUDE_JVMTI
+Mutex* CDSClassFileStream_lock = NULL;
+#endif

 #define MAX_NUM_MUTEX 128
 static Monitor * _mutex_array[MAX_NUM_MUTEX];
@@ -339,6 +342,9 @@ void mutex_init() {
 #if INCLUDE_NMT
   def(NMTQuery_lock , PaddedMutex , max_nonleaf, false, Monitor::_safepoint_check_always);
 #endif
+#if INCLUDE_CDS && INCLUDE_JVMTI
+  def(CDSClassFileStream_lock , PaddedMutex , max_nonleaf, false, Monitor::_safepoint_check_always);
+#endif
 }

 GCMutexLocker::GCMutexLocker(Monitor * mutex) {
@@ -133,6 +133,9 @@ extern Mutex* DCmdFactory_lock; // serialize access to DCmdFact
 #if INCLUDE_NMT
 extern Mutex* NMTQuery_lock; // serialize NMT Dcmd queries
 #endif
+#if INCLUDE_CDS && INCLUDE_JVMTI
+extern Mutex* CDSClassFileStream_lock; // FileMapInfo::open_stream_for_jvmti
+#endif
 #if INCLUDE_JFR
 extern Mutex* JfrStacktrace_lock; // used to guard access to the JFR stacktrace table
 extern Monitor* JfrMsg_lock; // protects JFR messaging
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2014, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -102,8 +102,8 @@ public class SpaceUtilizationCheck {
         }
       }
     }
-    if (checked.size() != 5) {
-      throw new RuntimeException("Must have 5 consecutive, fully utilized regions");
+    if (checked.size() != 4) {
+      throw new RuntimeException("Must have 4 consecutive, fully utilized regions");
     }
   }
 }