8251261: CDS dumping should not clear states in live classes

Reviewed-by: minqi, ccheung
Author: Ioi Lam
Date: 2020-09-24 19:19:53 +00:00
parent 9ac162e896
commit 8b85c3a6d7
9 changed files with 88 additions and 138 deletions
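The gist of the change: dump-time code used to iterate the live classes in _global_klass_objects and strip their state (java mirrors, resolved references, other unshareable info) in place. After this patch that scrubbing happens only on the Klass copies placed in the archive buffer, via the new ArchiveBuilder::make_klasses_shareable(), so the classes the running VM keeps using are left intact. A standalone, hypothetical sketch of the contrast (LiveKlass and both *_dump_flow functions are illustrative names, not HotSpot code):

// Standalone illustration of the before/after behavior -- not HotSpot code.
#include <vector>

struct LiveKlass {
  bool unshareable_info_present = true;
  LiveKlass clone_into_buffer() const { return *this; }        // models the ArchiveBuilder copy step
  void remove_unshareable_info() { unshareable_info_present = false; }
};

// Before: state was cleared directly on the live classes.
void old_dump_flow(std::vector<LiveKlass>& live_classes) {
  for (LiveKlass& k : live_classes) {
    k.remove_unshareable_info();                               // mutates classes the VM still runs
  }
}

// After: only the buffered copies are scrubbed (what make_klasses_shareable() now does).
void new_dump_flow(const std::vector<LiveKlass>& live_classes,
                   std::vector<LiveKlass>& archive_buffer) {
  for (const LiveKlass& k : live_classes) {
    archive_buffer.push_back(k.clone_into_buffer());
    archive_buffer.back().remove_unshareable_info();           // live classes stay untouched
  }
}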


@@ -1179,8 +1179,7 @@ oop java_lang_Class::archive_mirror(Klass* k, TRAPS) {
if (!(ik->is_shared_boot_class() || ik->is_shared_platform_class() ||
ik->is_shared_app_class())) {
// Archiving mirror for classes from non-builtin loaders is not
// supported. Clear the _java_mirror within the archived class.
k->clear_java_mirror_handle();
// supported.
return NULL;
}
}


@@ -41,6 +41,7 @@
#include "utilities/hashtable.inline.hpp"
ArchiveBuilder* ArchiveBuilder::_singleton = NULL;
intx ArchiveBuilder::_buffer_to_target_delta = 0;
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
char* newtop = ArchiveBuilder::singleton()->_ro_region->top();
@@ -564,6 +565,34 @@ void ArchiveBuilder::relocate_well_known_klasses() {
SystemDictionary::well_known_klasses_do(&doit);
}
void ArchiveBuilder::make_klasses_shareable() {
for (int i = 0; i < klasses()->length(); i++) {
Klass* k = klasses()->at(i);
k->remove_java_mirror();
if (k->is_objArray_klass()) {
// InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
// on their array classes.
} else if (k->is_typeArray_klass()) {
k->remove_unshareable_info();
} else {
assert(k->is_instance_klass(), " must be");
InstanceKlass* ik = InstanceKlass::cast(k);
if (DynamicDumpSharedSpaces) {
// For static dump, class loader type are already set.
ik->assign_class_loader_type();
}
MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread::current(), ik);
ik->remove_unshareable_info();
if (log_is_enabled(Debug, cds, class)) {
ResourceMark rm;
log_debug(cds, class)("klasses[%4d] = " PTR_FORMAT " %s", i, p2i(to_target(ik)), ik->external_name());
}
}
}
}
void ArchiveBuilder::print_stats(int ro_all, int rw_all, int mc_all) {
_alloc_stats->print_stats(ro_all, rw_all, mc_all);
}
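The loop in make_klasses_shareable() above skips object-array klasses because, per its comment, clearing an InstanceKlass or TypeArrayKlass is expected to cascade to the array classes derived from it. A tiny standalone sketch of that cascade (SketchKlass is an illustrative type, not HotSpot code):

// Standalone illustration -- not HotSpot code.
struct SketchKlass {
  SketchKlass* array_klass = nullptr;     // e.g. Foo -> Foo[], Foo[] -> Foo[][]
  bool shareable = false;
  void remove_unshareable_info() {
    shareable = true;
    if (array_klass != nullptr) {
      // The array classes are handled here, so the dump loop need not visit them.
      array_klass->remove_unshareable_info();
    }
  }
};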


@@ -193,6 +193,37 @@ protected:
_ro_region = ro_region;
}
protected:
DumpRegion* _current_dump_space;
address _alloc_bottom;
DumpRegion* current_dump_space() const { return _current_dump_space; }
public:
void set_current_dump_space(DumpRegion* r) { _current_dump_space = r; }
bool is_in_buffer_space(address p) const {
return (_alloc_bottom <= p && p < (address)current_dump_space()->top());
}
template <typename T> bool is_in_target_space(T target_obj) const {
address buff_obj = address(target_obj) - _buffer_to_target_delta;
return is_in_buffer_space(buff_obj);
}
template <typename T> bool is_in_buffer_space(T obj) const {
return is_in_buffer_space(address(obj));
}
template <typename T> T to_target_no_check(T obj) const {
return (T)(address(obj) + _buffer_to_target_delta);
}
template <typename T> T to_target(T obj) const {
assert(is_in_buffer_space(obj), "must be");
return (T)(address(obj) + _buffer_to_target_delta);
}
public:
ArchiveBuilder(DumpRegion* rw_region, DumpRegion* ro_region);
~ArchiveBuilder();
@@ -208,7 +239,7 @@ public:
void dump_ro_region();
void relocate_pointers();
void relocate_well_known_klasses();
void make_klasses_shareable();
address get_dumped_addr(address src_obj) const;
// All klasses and symbols that will be copied into the archive
@@ -235,6 +266,7 @@ public:
}
void print_stats(int ro_all, int rw_all, int mc_all);
static intx _buffer_to_target_delta;
};
#endif // SHARE_MEMORY_ARCHIVEBUILDER_HPP
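The helpers consolidated into ArchiveBuilder here (is_in_buffer_space(), to_target(), to_target_no_check(), _buffer_to_target_delta) all reduce to one constant offset between the temporary dump buffer and the address at which the region is expected to be mapped at run time. A standalone sketch of the arithmetic; the 0x800000000 target base is an arbitrary assumption, not a value from the patch:

// Standalone illustration -- not HotSpot code.
#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  char buffer[64];                                     // stands in for the dump buffer
  uintptr_t buffer_bottom = (uintptr_t)buffer;
  uintptr_t target_base   = 0x800000000ULL;            // assumed runtime mapping address
  intptr_t  buffer_to_target_delta = (intptr_t)(target_base - buffer_bottom);

  uintptr_t obj_in_buffer = buffer_bottom + 16;        // an object copied into the buffer
  assert(obj_in_buffer >= buffer_bottom &&
         obj_in_buffer <  buffer_bottom + sizeof(buffer));          // is_in_buffer_space() analogue

  uintptr_t obj_at_target = obj_in_buffer + buffer_to_target_delta; // to_target() analogue
  printf("buffer 0x%llx -> target 0x%llx\n",
         (unsigned long long)obj_in_buffer, (unsigned long long)obj_at_target);
  return 0;
}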


@@ -44,8 +44,6 @@
class DynamicArchiveBuilder : public ArchiveBuilder {
public:
static intx _buffer_to_target_delta;
DumpRegion* _current_dump_space;
static size_t reserve_alignment() {
return os::vm_allocation_granularity();
@@ -59,32 +57,6 @@ public:
ArchivePtrMarker::mark_pointer(ptr_loc);
}
DumpRegion* current_dump_space() const {
return _current_dump_space;
}
bool is_in_buffer_space(address p) const {
return (_alloc_bottom <= p && p < (address)current_dump_space()->top());
}
template <typename T> bool is_in_target_space(T target_obj) const {
address buff_obj = address(target_obj) - _buffer_to_target_delta;
return is_in_buffer_space(buff_obj);
}
template <typename T> bool is_in_buffer_space(T obj) const {
return is_in_buffer_space(address(obj));
}
template <typename T> T to_target_no_check(T obj) const {
return (T)(address(obj) + _buffer_to_target_delta);
}
template <typename T> T to_target(T obj) const {
assert(is_in_buffer_space(obj), "must be");
return (T)(address(obj) + _buffer_to_target_delta);
}
template <typename T> T get_dumped_addr(T obj) {
return (T)ArchiveBuilder::get_dumped_addr((address)obj);
}
@@ -113,7 +85,6 @@ public:
public:
DynamicArchiveHeader *_header;
address _alloc_bottom;
address _last_verified_top;
size_t _other_region_used_bytes;
@@ -128,7 +99,7 @@ public:
void init_header(address addr);
void release_header();
void make_trampolines();
void make_klasses_shareable();
void sort_methods();
void sort_methods(InstanceKlass* ik) const;
void remark_pointers_for_instance_klass(InstanceKlass* k, bool should_mark) const;
void relocate_buffer_to_target();
@@ -250,6 +221,7 @@ public:
verify_estimate_size(_estimated_hashtable_bytes, "Hashtables");
make_trampolines();
sort_methods();
log_info(cds)("Make classes shareable");
make_klasses_shareable();
@@ -275,8 +247,6 @@ public:
}
};
intx DynamicArchiveBuilder::_buffer_to_target_delta;
size_t DynamicArchiveBuilder::estimate_archive_size() {
// size of the symbol table and two dictionaries, plus the RunTimeSharedClassInfo's
_estimated_hashtable_bytes = 0;
@@ -408,35 +378,14 @@ void DynamicArchiveBuilder::make_trampolines() {
guarantee(p <= mc_space->top(), "Estimate of trampoline size is insufficient");
}
void DynamicArchiveBuilder::make_klasses_shareable() {
int i, count = klasses()->length();
void DynamicArchiveBuilder::sort_methods() {
InstanceKlass::disable_method_binary_search();
for (i = 0; i < count; i++) {
for (int i = 0; i < klasses()->length(); i++) {
Klass* k = klasses()->at(i);
if (k->is_instance_klass()) {
sort_methods(InstanceKlass::cast(k));
}
}
for (i = 0; i < count; i++) {
Klass* k = klasses()->at(i);
if (!k->is_instance_klass()) {
continue;
}
InstanceKlass* ik = InstanceKlass::cast(k);
ik->assign_class_loader_type();
MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread::current(), ik);
ik->remove_unshareable_info();
assert(ik->array_klasses() == NULL, "sanity");
if (log_is_enabled(Debug, cds, dynamic)) {
ResourceMark rm;
log_debug(cds, dynamic)("klasses[%4i] = " PTR_FORMAT " %s", i, p2i(to_target(ik)), ik->external_name());
}
}
}
// The address order of the copied Symbols may be different than when the original
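sort_methods() is now its own pass, separate from make_klasses_shareable(). Together with the InstanceKlass::disable_method_binary_search() call and the note above about the address order of copied Symbols, its role is to keep each archived method array sorted so that run-time lookups can use binary search by name. A standalone sketch of that idea (SketchMethod is illustrative; names are modeled by symbol addresses):

// Standalone illustration -- not HotSpot code.
#include <algorithm>
#include <vector>

struct SketchMethod { const void* name_symbol; };      // stands in for a Method* keyed by its Symbol*

void sort_for_archive(std::vector<SketchMethod>& methods) {
  std::sort(methods.begin(), methods.end(),
            [](const SketchMethod& a, const SketchMethod& b) {
              return a.name_symbol < b.name_symbol;    // order by (archived) symbol address
            });
}

const SketchMethod* lookup(const std::vector<SketchMethod>& methods, const void* name) {
  auto it = std::lower_bound(methods.begin(), methods.end(), name,
                             [](const SketchMethod& m, const void* n) {
                               return m.name_symbol < n;
                             });
  return (it != methods.end() && it->name_symbol == name) ? &*it : nullptr;
}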


@@ -36,6 +36,7 @@
#include "logging/log.hpp"
#include "logging/logMessage.hpp"
#include "logging/logStream.hpp"
#include "memory/archiveBuilder.hpp"
#include "memory/archiveUtils.hpp"
#include "memory/filemap.hpp"
#include "memory/heapShared.inline.hpp"
@@ -398,7 +399,7 @@ void KlassSubGraphInfo::add_subgraph_object_klass(Klass* orig_k, Klass *relocate
new(ResourceObj::C_HEAP, mtClass) GrowableArray<Klass*>(50, mtClass);
}
assert(relocated_k->is_shared(), "must be a shared class");
assert(ArchiveBuilder::singleton()->is_in_buffer_space(relocated_k), "must be a shared class");
if (_k == relocated_k) {
// Don't add the Klass containing the sub-graph to it's own klass


@@ -544,30 +544,6 @@ GrowableArray<Klass*>* MetaspaceShared::collected_klasses() {
return _global_klass_objects;
}
static void remove_unshareable_in_classes() {
for (int i = 0; i < _global_klass_objects->length(); i++) {
Klass* k = _global_klass_objects->at(i);
if (!k->is_objArray_klass()) {
// InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
// on their array classes.
assert(k->is_instance_klass() || k->is_typeArray_klass(), "must be");
k->remove_unshareable_info();
}
}
}
static void remove_java_mirror_in_classes() {
for (int i = 0; i < _global_klass_objects->length(); i++) {
Klass* k = _global_klass_objects->at(i);
if (!k->is_objArray_klass()) {
// InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
// on their array classes.
assert(k->is_instance_klass() || k->is_typeArray_klass(), "must be");
k->remove_java_mirror();
}
}
}
static void rewrite_nofast_bytecode(const methodHandle& method) {
BytecodeStream bcs(method);
while (!bcs.is_last_bytecode()) {
@@ -587,21 +563,9 @@ static void rewrite_nofast_bytecode(const methodHandle& method) {
}
}
// Walk all methods in the class list to ensure that they won't be modified at
// run time. This includes:
// [1] Rewrite all bytecodes as needed, so that the ConstMethod* will not be modified
// at run time by RewriteBytecodes/RewriteFrequentPairs
// [2] Assign a fingerprint, so one doesn't need to be assigned at run-time.
static void rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread* thread) {
for (int i = 0; i < _global_klass_objects->length(); i++) {
Klass* k = _global_klass_objects->at(i);
if (k->is_instance_klass()) {
InstanceKlass* ik = InstanceKlass::cast(k);
MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(thread, ik);
}
}
}
void MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread* thread, InstanceKlass* ik) {
for (int i = 0; i < ik->methods()->length(); i++) {
methodHandle m(thread, ik->methods()->at(i));
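The class-list walker above is removed; only the per-class MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints() remains, now invoked from ArchiveBuilder::make_klasses_shareable() on each archived copy. As the surviving comment says, the goal is to do every self-modifying step at dump time so the archived ConstMethod stays read-only at run time. A standalone sketch of that pattern (SketchConstMethod and the hash are illustrative, not HotSpot code):

// Standalone illustration -- not HotSpot code.
#include <cstdint>
#include <vector>

struct SketchConstMethod {
  std::vector<uint8_t> bytecodes;
  uint64_t fingerprint = 0;
  bool has_fingerprint = false;
};

static uint64_t compute_fingerprint(const std::vector<uint8_t>& code) {
  uint64_t h = 14695981039346656037ULL;                // FNV-1a-style hash, purely illustrative
  for (uint8_t b : code) { h ^= b; h *= 1099511628211ULL; }
  return h;
}

void prepare_for_archive(SketchConstMethod& m) {
  // [1] rewrite bytecodes into a form the interpreter will not rewrite again at run time
  //     (elided here; the real code switches selected bytecodes to their "nofast" forms)
  // [2] compute and cache the fingerprint so run time only ever reads it
  m.fingerprint = compute_fingerprint(m.bytecodes);
  m.has_fingerprint = true;
}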
@@ -645,7 +609,10 @@ public:
class StaticArchiveBuilder : public ArchiveBuilder {
public:
StaticArchiveBuilder(DumpRegion* rw_region, DumpRegion* ro_region)
: ArchiveBuilder(rw_region, ro_region) {}
: ArchiveBuilder(rw_region, ro_region) {
_alloc_bottom = address(SharedBaseAddress);
_buffer_to_target_delta = 0;
}
virtual void iterate_roots(MetaspaceClosure* it, bool is_relocating_pointers) {
FileMapInfo::metaspace_pointers_do(it, false);
@@ -669,13 +636,6 @@ public:
char* VM_PopulateDumpSharedSpace::dump_read_only_tables() {
ArchiveBuilder::OtherROAllocMark mark;
log_info(cds)("Removing java_mirror ... ");
if (!HeapShared::is_heap_object_archiving_allowed()) {
Universe::clear_basic_type_mirrors();
}
remove_java_mirror_in_classes();
log_info(cds)("done. ");
SystemDictionaryShared::write_to_archive();
// Write the other data to the output array.
@@ -765,19 +725,10 @@ void VM_PopulateDumpSharedSpace::doit() {
SystemDictionaryShared::check_excluded_classes();
StaticArchiveBuilder builder(&_rw_region, &_ro_region);
builder.set_current_dump_space(&_mc_region);
builder.gather_klasses_and_symbols();
_global_klass_objects = builder.klasses();
// Ensure the ConstMethods won't be modified at run-time
log_info(cds)("Updating ConstMethods ... ");
rewrite_nofast_bytecodes_and_calculate_fingerprints(THREAD);
log_info(cds)("done. ");
// Remove all references outside the metadata
log_info(cds)("Removing unshareable information ... ");
remove_unshareable_in_classes();
log_info(cds)("done. ");
builder.gather_source_objs();
CppVtables::allocate_cloned_cpp_vtptrs();
@@ -786,6 +737,7 @@ void VM_PopulateDumpSharedSpace::doit() {
{
_mc_region.pack(&_rw_region);
builder.set_current_dump_space(&_rw_region);
builder.dump_rw_region();
#if INCLUDE_CDS_JAVA_HEAP
if (MetaspaceShared::use_full_module_graph()) {
@@ -798,6 +750,7 @@ void VM_PopulateDumpSharedSpace::doit() {
}
{
_rw_region.pack(&_ro_region);
builder.set_current_dump_space(&_ro_region);
builder.dump_ro_region();
#if INCLUDE_CDS_JAVA_HEAP
if (MetaspaceShared::use_full_module_graph()) {
@@ -818,6 +771,9 @@ void VM_PopulateDumpSharedSpace::doit() {
builder.relocate_well_known_klasses();
log_info(cds)("Make classes shareable");
builder.make_klasses_shareable();
char* serialized_data = dump_read_only_tables();
_ro_region.pack();
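Pulling the hunks in this file together, the static dump now drives everything through the builder: the current dump space is switched as each region is packed, and the final make_klasses_shareable() call replaces the old in-place clearing. A condensed sketch of the sequence, using only the calls visible in the hunks above (intervening steps are omitted):

// Condensed from the hunks above -- not a verbatim copy of VM_PopulateDumpSharedSpace::doit().
StaticArchiveBuilder builder(&_rw_region, &_ro_region);
builder.set_current_dump_space(&_mc_region);   // allocations start in the mc region
builder.gather_klasses_and_symbols();
builder.gather_source_objs();

_mc_region.pack(&_rw_region);
builder.set_current_dump_space(&_rw_region);
builder.dump_rw_region();

_rw_region.pack(&_ro_region);
builder.set_current_dump_space(&_ro_region);
builder.dump_ro_region();

builder.relocate_well_known_klasses();
builder.make_klasses_shareable();              // clears state on the archived copies only
char* serialized_data = dump_read_only_tables();
_ro_region.pack();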
@@ -871,9 +827,9 @@ void VM_PopulateDumpSharedSpace::doit() {
"for testing purposes only and should not be used in a production environment");
}
// There may be other pending VM operations that operate on the InstanceKlasses,
// which will fail because InstanceKlasses::remove_unshareable_info()
// has been called. Forget these operations and exit the VM directly.
// There may be pending VM operations. We have changed some global states
// (such as SystemDictionary::_well_known_klasses) that may cause these VM operations
// to fail. For safety, forget these operations and exit the VM directly.
vm_direct_exit(0);
}


@@ -185,15 +185,6 @@ void Universe::replace_mirror(BasicType t, oop new_mirror) {
Universe::_mirrors[t].replace(new_mirror);
}
// Not sure why CDS has to do this
void Universe::clear_basic_type_mirrors() {
for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
if (!is_reference_type((BasicType)i)) {
Universe::_mirrors[i].replace(NULL);
}
}
}
void Universe::basic_type_classes_do(void f(Klass*)) {
for (int i = T_BOOLEAN; i < T_LONG+1; i++) {
f(_typeArrayKlassObjs[i]);
@@ -245,7 +236,11 @@ void Universe::serialize(SerializeClosure* f) {
_mirrors[i] = OopHandle(vm_global(), mirror_oop);
}
} else {
mirror_oop = _mirrors[i].resolve();
if (HeapShared::is_heap_object_archiving_allowed()) {
mirror_oop = _mirrors[i].resolve();
} else {
mirror_oop = NULL;
}
f->do_oop(&mirror_oop); // write to archive
}
if (mirror_oop != NULL) { // may be null if archived heap is disabled
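Universe::serialize() now writes a NULL mirror slot itself when heap-object archiving is disallowed, instead of relying on the removed Universe::clear_basic_type_mirrors() having nulled the live handles beforehand; the read side already tolerates NULL ("may be null if archived heap is disabled"). A standalone sketch of that write/read symmetry around a do_oop-style callback (SketchSerializer is illustrative, not the real SerializeClosure):

// Standalone illustration -- not HotSpot code.
#include <cstdio>

struct SketchSerializer {
  bool writing;
  const void* slot = nullptr;          // stands in for the oop slot stored in the archive
  void do_oop(const void** p) {        // models SerializeClosure::do_oop
    if (writing) { slot = *p; } else { *p = slot; }
  }
};

int main() {
  const void* mirror = nullptr;        // heap archiving off -> the dump writes NULL
  SketchSerializer dump{true};
  dump.do_oop(&mirror);                // the archive slot now holds NULL

  const void* restored = (const void*)0x1;      // garbage before restore
  SketchSerializer restore{false, dump.slot};
  restore.do_oop(&restored);                    // reads NULL back -> mirror is recreated later
  printf("restored mirror: %s\n",
         restored == nullptr ? "NULL (recreated at runtime)" : "non-NULL");
  return 0;
}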


@@ -232,7 +232,6 @@ class Universe: AllStatic {
static oop java_mirror(BasicType t);
static void replace_mirror(BasicType t, oop obj);
static void clear_basic_type_mirrors();
static oop main_thread_group();
static void set_main_thread_group(oop group);


@@ -283,7 +283,6 @@ void ConstantPool::archive_resolved_references(Thread* THREAD) {
ik->is_shared_app_class())) {
// Archiving resolved references for classes from non-builtin loaders
// is not yet supported.
set_resolved_references(OopHandle());
return;
}
@@ -316,7 +315,6 @@ void ConstantPool::archive_resolved_references(Thread* THREAD) {
// resolved references will be created using the normal process
// when there is no archived value.
_cache->set_archived_references(archived);
set_resolved_references(OopHandle());
}
}
@@ -396,15 +394,7 @@ void ConstantPool::remove_unshareable_info() {
// at runtime.
set_resolved_reference_length(
resolved_references() != NULL ? resolved_references()->length() : 0);
// If archiving heap objects is not allowed, clear the resolved references.
// Otherwise, it is cleared after the resolved references array is cached
// (see archive_resolved_references()).
// If DynamicDumpSharedSpaces is enabled, clear the resolved references also
// as java objects are not archived in the top layer.
if (!HeapShared::is_heap_object_archiving_allowed() || DynamicDumpSharedSpaces) {
set_resolved_references(OopHandle());
}
set_resolved_references(OopHandle());
// Shared ConstantPools are in the RO region, so the _flags cannot be modified.
// The _on_stack flag is used to prevent ConstantPools from deallocation during