8237767: Field layout computation overhaul

Reviewed-by: dholmes, coleenp, lfoltan, shade
Frederic Parain 2020-02-10 09:49:12 -05:00
parent 304d764a72
commit 9886cb401c
19 changed files with 1528 additions and 231 deletions

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -213,14 +213,19 @@ ciInstanceKlass* ciInstanceKlass::get_canonical_holder(int offset) {
}
ciInstanceKlass* self = this;
for (;;) {
assert(self->is_loaded(), "must be loaded to have size");
ciInstanceKlass* super = self->super();
if (super == NULL || super->nof_nonstatic_fields() == 0 ||
!super->contains_field_offset(offset)) {
return self;
} else {
self = super; // return super->get_canonical_holder(offset)
assert(self->is_loaded(), "must be loaded to access field info");
ciField* field = self->get_field_by_offset(offset, false);
if (field != NULL) {
return field->holder();
} else {
for (;;) {
assert(self->is_loaded(), "must be loaded to have size");
ciInstanceKlass* super = self->super();
if (super == NULL || super->nof_nonstatic_fields() == 0) {
return self;
} else {
self = super; // return super->get_canonical_holder(offset)
}
}
}
}
@ -391,6 +396,13 @@ bool ciInstanceKlass::has_finalizable_subclass() {
return Dependencies::find_finalizable_subclass(get_instanceKlass()) != NULL;
}
// ------------------------------------------------------------------
// ciInstanceKlass::contains_field_offset
bool ciInstanceKlass::contains_field_offset(int offset) {
VM_ENTRY_MARK;
return get_instanceKlass()->contains_field_offset(offset);
}
// ------------------------------------------------------------------
// ciInstanceKlass::get_field_by_offset
ciField* ciInstanceKlass::get_field_by_offset(int field_offset, bool is_static) {
@ -457,15 +469,9 @@ int ciInstanceKlass::compute_nonstatic_fields() {
ciInstanceKlass* super = this->super();
GrowableArray<ciField*>* super_fields = NULL;
if (super != NULL && super->has_nonstatic_fields()) {
int super_fsize = super->nonstatic_field_size() * heapOopSize;
int super_flen = super->nof_nonstatic_fields();
super_fields = super->_nonstatic_fields;
assert(super_flen == 0 || super_fields != NULL, "first get nof_fields");
// See if I am no larger than my super; if so, I can use his fields.
if (fsize == super_fsize) {
_nonstatic_fields = super_fields;
return super_fields->length();
}
}
GrowableArray<ciField*>* fields = NULL;

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -225,9 +225,7 @@ public:
ciInstanceKlass* unique_concrete_subklass();
bool has_finalizable_subclass();
bool contains_field_offset(int offset) {
return instanceOopDesc::contains_field_offset(offset, nonstatic_field_size());
}
bool contains_field_offset(int offset);
// Get the instance of java.lang.Class corresponding to
// this klass. This instance is used for locking of

@ -30,6 +30,7 @@
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/defaultMethods.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/fieldLayoutBuilder.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
@ -60,6 +61,7 @@
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "runtime/arguments.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/os.hpp"
@ -1686,8 +1688,12 @@ void ClassFileParser::parse_fields(const ClassFileStream* const cfs,
field->set_allocation_type(atype);
// After field is initialized with type, we can augment it with aux info
if (parsed_annotations.has_any_annotations())
if (parsed_annotations.has_any_annotations()) {
parsed_annotations.apply_to(field);
if (field->is_contended()) {
_has_contended_fields = true;
}
}
}
int index = length;
@ -3932,39 +3938,6 @@ const InstanceKlass* ClassFileParser::parse_super_class(ConstantPool* const cp,
return super_klass;
}
static unsigned int compute_oop_map_count(const InstanceKlass* super,
unsigned int nonstatic_oop_map_count,
int first_nonstatic_oop_offset) {
unsigned int map_count =
NULL == super ? 0 : super->nonstatic_oop_map_count();
if (nonstatic_oop_map_count > 0) {
// We have oops to add to map
if (map_count == 0) {
map_count = nonstatic_oop_map_count;
}
else {
// Check whether we should add a new map block or whether the last one can
// be extended
const OopMapBlock* const first_map = super->start_of_nonstatic_oop_maps();
const OopMapBlock* const last_map = first_map + map_count - 1;
const int next_offset = last_map->offset() + last_map->count() * heapOopSize;
if (next_offset == first_nonstatic_oop_offset) {
// There is no gap between superklass's last oop field and first
// local oop field, merge maps.
nonstatic_oop_map_count -= 1;
}
else {
// Superklass didn't end with an oop field, add extra maps
assert(next_offset < first_nonstatic_oop_offset, "just checking");
}
map_count += nonstatic_oop_map_count;
}
}
return map_count;
}
#ifndef PRODUCT
static void print_field_layout(const Symbol* name,
Array<u2>* fields,
@ -4002,18 +3975,121 @@ static void print_field_layout(const Symbol* name,
}
#endif
// Values needed for oopmap and InstanceKlass creation
class ClassFileParser::FieldLayoutInfo : public ResourceObj {
public:
int* nonstatic_oop_offsets;
unsigned int* nonstatic_oop_counts;
unsigned int nonstatic_oop_map_count;
unsigned int total_oop_map_count;
int instance_size;
int nonstatic_field_size;
int static_field_size;
bool has_nonstatic_fields;
};
OopMapBlocksBuilder::OopMapBlocksBuilder(unsigned int max_blocks) {
_max_nonstatic_oop_maps = max_blocks;
_nonstatic_oop_map_count = 0;
if (max_blocks == 0) {
_nonstatic_oop_maps = NULL;
} else {
_nonstatic_oop_maps =
NEW_RESOURCE_ARRAY(OopMapBlock, _max_nonstatic_oop_maps);
memset(_nonstatic_oop_maps, 0, sizeof(OopMapBlock) * max_blocks);
}
}
OopMapBlock* OopMapBlocksBuilder::last_oop_map() const {
assert(_nonstatic_oop_map_count > 0, "Has no oop maps");
return _nonstatic_oop_maps + (_nonstatic_oop_map_count - 1);
}
// addition of super oop maps
void OopMapBlocksBuilder::initialize_inherited_blocks(OopMapBlock* blocks, unsigned int nof_blocks) {
assert(nof_blocks && _nonstatic_oop_map_count == 0 &&
nof_blocks <= _max_nonstatic_oop_maps, "invariant");
memcpy(_nonstatic_oop_maps, blocks, sizeof(OopMapBlock) * nof_blocks);
_nonstatic_oop_map_count += nof_blocks;
}
// collection of oops
void OopMapBlocksBuilder::add(int offset, int count) {
if (_nonstatic_oop_map_count == 0) {
_nonstatic_oop_map_count++;
}
OopMapBlock* nonstatic_oop_map = last_oop_map();
if (nonstatic_oop_map->count() == 0) { // Unused map, set it up
nonstatic_oop_map->set_offset(offset);
nonstatic_oop_map->set_count(count);
} else if (nonstatic_oop_map->is_contiguous(offset)) { // contiguous, add
nonstatic_oop_map->increment_count(count);
} else { // Need a new one...
_nonstatic_oop_map_count++;
assert(_nonstatic_oop_map_count <= _max_nonstatic_oop_maps, "range check");
nonstatic_oop_map = last_oop_map();
nonstatic_oop_map->set_offset(offset);
nonstatic_oop_map->set_count(count);
}
}
// general purpose copy, e.g. into allocated instanceKlass
void OopMapBlocksBuilder::copy(OopMapBlock* dst) {
if (_nonstatic_oop_map_count != 0) {
memcpy(dst, _nonstatic_oop_maps, sizeof(OopMapBlock) * _nonstatic_oop_map_count);
}
}
// Sort and compact adjacent blocks
void OopMapBlocksBuilder::compact() {
if (_nonstatic_oop_map_count <= 1) {
return;
}
/*
* Since field layout sneaks in oops before values, we will be able to condense
* blocks. There is potential to compact between super, own refs and values
* containing refs.
*
* Currently compaction is slightly limited due to values being 8 byte aligned.
* This may well change: FixMe if it doesn't, the code below is fairly general purpose
* and maybe it doesn't need to be.
*/
qsort(_nonstatic_oop_maps, _nonstatic_oop_map_count, sizeof(OopMapBlock),
(_sort_Fn)OopMapBlock::compare_offset);
if (_nonstatic_oop_map_count < 2) {
return;
}
// Make a temp copy, and iterate through and copy back into the original
ResourceMark rm;
OopMapBlock* oop_maps_copy =
NEW_RESOURCE_ARRAY(OopMapBlock, _nonstatic_oop_map_count);
OopMapBlock* oop_maps_copy_end = oop_maps_copy + _nonstatic_oop_map_count;
copy(oop_maps_copy);
OopMapBlock* nonstatic_oop_map = _nonstatic_oop_maps;
unsigned int new_count = 1;
oop_maps_copy++;
while(oop_maps_copy < oop_maps_copy_end) {
assert(nonstatic_oop_map->offset() < oop_maps_copy->offset(), "invariant");
if (nonstatic_oop_map->is_contiguous(oop_maps_copy->offset())) {
nonstatic_oop_map->increment_count(oop_maps_copy->count());
} else {
nonstatic_oop_map++;
new_count++;
nonstatic_oop_map->set_offset(oop_maps_copy->offset());
nonstatic_oop_map->set_count(oop_maps_copy->count());
}
oop_maps_copy++;
}
assert(new_count <= _nonstatic_oop_map_count, "end up with more maps after compact() ?");
_nonstatic_oop_map_count = new_count;
}
void OopMapBlocksBuilder::print_on(outputStream* st) const {
st->print_cr(" OopMapBlocks: %3d /%3d", _nonstatic_oop_map_count, _max_nonstatic_oop_maps);
if (_nonstatic_oop_map_count > 0) {
OopMapBlock* map = _nonstatic_oop_maps;
OopMapBlock* last_map = last_oop_map();
assert(map <= last_map, "Last less than first");
while (map <= last_map) {
st->print_cr(" Offset: %3d -%3d Count: %3d", map->offset(),
map->offset() + map->offset_span() - heapOopSize, map->count());
map++;
}
}
}
void OopMapBlocksBuilder::print_value_on(outputStream* st) const {
print_on(st);
}
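// Editor's sketch (not part of this change): typical use of OopMapBlocksBuilder while
// laying out fields. Offsets are hypothetical and assume heapOopSize == 8; "ik" stands
// for the InstanceKlass being filled in.
//
//   OopMapBlocksBuilder* builder = new OopMapBlocksBuilder(3 /* max_blocks */);
//   builder->add(16, 1);   // first oop field: starts a block at offset 16
//   builder->add(24, 1);   // adjacent oop: extends that block (count == 2)
//   builder->add(40, 1);   // non-adjacent oop: starts a second block
//   builder->compact();    // sort by offset and merge blocks that became contiguous
//   builder->copy(ik->start_of_nonstatic_oop_maps());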
// Layout fields and fill in FieldLayoutInfo. Could use more refactoring!
void ClassFileParser::layout_fields(ConstantPool* cp,
@ -4100,16 +4176,15 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
// count[i] oops following. Before we know how many regions are required,
// we pessimistically allocate the maps to fit all the oops into the
// distinct regions.
//
// TODO: We add +1 to always allocate non-zero resource arrays; we need
// to figure out if we still need to do this.
unsigned int nonstatic_oop_map_count = 0;
unsigned int max_nonstatic_oop_maps = fac->count[NONSTATIC_OOP] + 1;
int* nonstatic_oop_offsets = NEW_RESOURCE_ARRAY_IN_THREAD(
THREAD, int, max_nonstatic_oop_maps);
unsigned int* const nonstatic_oop_counts = NEW_RESOURCE_ARRAY_IN_THREAD(
THREAD, unsigned int, max_nonstatic_oop_maps);
int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count();
int max_oop_map_count = super_oop_map_count + fac->count[NONSTATIC_OOP];
OopMapBlocksBuilder* nonstatic_oop_maps = new OopMapBlocksBuilder(max_oop_map_count);
if (super_oop_map_count > 0) {
nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
_super_klass->nonstatic_oop_map_count());
}
int first_nonstatic_oop_offset = 0; // will be set for first oop field
@ -4260,26 +4335,7 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
real_offset = next_nonstatic_oop_offset;
next_nonstatic_oop_offset += heapOopSize;
}
// Record this oop in the oop maps
if( nonstatic_oop_map_count > 0 &&
nonstatic_oop_offsets[nonstatic_oop_map_count - 1] ==
real_offset -
int(nonstatic_oop_counts[nonstatic_oop_map_count - 1]) *
heapOopSize ) {
// This oop is adjacent to the previous one, add to current oop map
assert(nonstatic_oop_map_count - 1 < max_nonstatic_oop_maps, "range check");
nonstatic_oop_counts[nonstatic_oop_map_count - 1] += 1;
} else {
// This oop is not adjacent to the previous one, create new oop map
assert(nonstatic_oop_map_count < max_nonstatic_oop_maps, "range check");
nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset;
nonstatic_oop_counts [nonstatic_oop_map_count] = 1;
nonstatic_oop_map_count += 1;
if( first_nonstatic_oop_offset == 0 ) { // Undefined
first_nonstatic_oop_offset = real_offset;
}
}
nonstatic_oop_maps->add(real_offset, 1);
break;
case NONSTATIC_BYTE:
if( nonstatic_byte_space_count > 0 ) {
@ -4392,26 +4448,7 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, heapOopSize);
real_offset = next_nonstatic_padded_offset;
next_nonstatic_padded_offset += heapOopSize;
// Record this oop in the oop maps
if( nonstatic_oop_map_count > 0 &&
nonstatic_oop_offsets[nonstatic_oop_map_count - 1] ==
real_offset -
int(nonstatic_oop_counts[nonstatic_oop_map_count - 1]) *
heapOopSize ) {
// This oop is adjacent to the previous one, add to current oop map
assert(nonstatic_oop_map_count - 1 < max_nonstatic_oop_maps, "range check");
nonstatic_oop_counts[nonstatic_oop_map_count - 1] += 1;
} else {
// This oop is not adjacent to the previous one, create new oop map
assert(nonstatic_oop_map_count < max_nonstatic_oop_maps, "range check");
nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset;
nonstatic_oop_counts [nonstatic_oop_map_count] = 1;
nonstatic_oop_map_count += 1;
if( first_nonstatic_oop_offset == 0 ) { // Undefined
first_nonstatic_oop_offset = real_offset;
}
}
nonstatic_oop_maps->add(real_offset, 1);
break;
default:
@ -4475,9 +4512,7 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
(nonstatic_fields_count > 0), "double-check nonstatic start/end");
// Number of non-static oop map blocks allocated at end of klass.
const unsigned int total_oop_map_count =
compute_oop_map_count(_super_klass, nonstatic_oop_map_count,
first_nonstatic_oop_offset);
nonstatic_oop_maps->compact();
#ifndef PRODUCT
if (PrintFieldLayout) {
@ -4492,58 +4527,13 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
#endif
// Pass back information needed for InstanceKlass creation
info->nonstatic_oop_offsets = nonstatic_oop_offsets;
info->nonstatic_oop_counts = nonstatic_oop_counts;
info->nonstatic_oop_map_count = nonstatic_oop_map_count;
info->total_oop_map_count = total_oop_map_count;
info->instance_size = instance_size;
info->static_field_size = static_field_size;
info->nonstatic_field_size = nonstatic_field_size;
info->has_nonstatic_fields = has_nonstatic_fields;
info->oop_map_blocks = nonstatic_oop_maps;
info->_instance_size = instance_size;
info->_static_field_size = static_field_size;
info->_nonstatic_field_size = nonstatic_field_size;
info->_has_nonstatic_fields = has_nonstatic_fields;
}
static void fill_oop_maps(const InstanceKlass* k,
unsigned int nonstatic_oop_map_count,
const int* nonstatic_oop_offsets,
const unsigned int* nonstatic_oop_counts) {
assert(k != NULL, "invariant");
OopMapBlock* this_oop_map = k->start_of_nonstatic_oop_maps();
const InstanceKlass* const super = k->superklass();
const unsigned int super_count = super ? super->nonstatic_oop_map_count() : 0;
if (super_count > 0) {
// Copy maps from superklass
OopMapBlock* super_oop_map = super->start_of_nonstatic_oop_maps();
for (unsigned int i = 0; i < super_count; ++i) {
*this_oop_map++ = *super_oop_map++;
}
}
if (nonstatic_oop_map_count > 0) {
if (super_count + nonstatic_oop_map_count > k->nonstatic_oop_map_count()) {
// The counts differ because there is no gap between superklass's last oop
// field and the first local oop field. Extend the last oop map copied
// from the superklass instead of creating new one.
nonstatic_oop_map_count--;
nonstatic_oop_offsets++;
this_oop_map--;
this_oop_map->set_count(this_oop_map->count() + *nonstatic_oop_counts++);
this_oop_map++;
}
// Add new map blocks, fill them
while (nonstatic_oop_map_count-- > 0) {
this_oop_map->set_offset(*nonstatic_oop_offsets++);
this_oop_map->set_count(*nonstatic_oop_counts++);
this_oop_map++;
}
assert(k->start_of_nonstatic_oop_maps() + k->nonstatic_oop_map_count() ==
this_oop_map, "sanity");
}
}
void ClassFileParser::set_precomputed_flags(InstanceKlass* ik) {
assert(ik != NULL, "invariant");
@ -5498,17 +5488,17 @@ int ClassFileParser::verify_legal_method_signature(const Symbol* name,
int ClassFileParser::static_field_size() const {
assert(_field_info != NULL, "invariant");
return _field_info->static_field_size;
return _field_info->_static_field_size;
}
int ClassFileParser::total_oop_map_count() const {
assert(_field_info != NULL, "invariant");
return _field_info->total_oop_map_count;
return _field_info->oop_map_blocks->_nonstatic_oop_map_count;
}
jint ClassFileParser::layout_size() const {
assert(_field_info != NULL, "invariant");
return _field_info->instance_size;
return _field_info->_instance_size;
}
static void check_methods_for_intrinsics(const InstanceKlass* ik,
@ -5652,19 +5642,19 @@ void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loa
set_klass_to_deallocate(ik);
assert(_field_info != NULL, "invariant");
assert(ik->static_field_size() == _field_info->static_field_size, "sanity");
assert(ik->nonstatic_oop_map_count() == _field_info->total_oop_map_count,
"sanity");
assert(ik->static_field_size() == _field_info->_static_field_size, "sanity");
assert(ik->nonstatic_oop_map_count() == _field_info->oop_map_blocks->_nonstatic_oop_map_count,
"sanity");
assert(ik->is_instance_klass(), "sanity");
assert(ik->size_helper() == _field_info->instance_size, "sanity");
assert(ik->size_helper() == _field_info->_instance_size, "sanity");
// Fill in information already parsed
ik->set_should_verify_class(_need_verify);
// Not yet: supers are done below to support the new subtype-checking fields
ik->set_nonstatic_field_size(_field_info->nonstatic_field_size);
ik->set_has_nonstatic_fields(_field_info->has_nonstatic_fields);
ik->set_nonstatic_field_size(_field_info->_nonstatic_field_size);
ik->set_has_nonstatic_fields(_field_info->_has_nonstatic_fields);
assert(_fac != NULL, "invariant");
ik->set_static_oop_field_count(_fac->count[STATIC_OOP]);
@ -5755,10 +5745,15 @@ void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loa
// Compute transitive closure of interfaces this class implements
// Do final class setup
fill_oop_maps(ik,
_field_info->nonstatic_oop_map_count,
_field_info->nonstatic_oop_offsets,
_field_info->nonstatic_oop_counts);
OopMapBlocksBuilder* oop_map_blocks = _field_info->oop_map_blocks;
if (oop_map_blocks->_nonstatic_oop_map_count > 0) {
oop_map_blocks->copy(ik->start_of_nonstatic_oop_maps());
}
if (_has_contended_fields || _parsed_annotations->is_contended() ||
( _super_klass != NULL && _super_klass->has_contended_annotations())) {
ik->set_has_contended_annotations(true);
}
// Fill in has_finalizer, has_vanilla_constructor, and layout_helper
set_precomputed_flags(ik);
@ -6001,6 +5996,7 @@ ClassFileParser::ClassFileParser(ClassFileStream* stream,
_has_nonstatic_concrete_methods(false),
_declares_nonstatic_concrete_methods(false),
_has_final_method(false),
_has_contended_fields(false),
_has_finalizer(false),
_has_empty_finalizer(false),
_has_vanilla_constructor(false),
@ -6478,7 +6474,13 @@ void ClassFileParser::post_process_parsed_stream(const ClassFileStream* const st
assert(_parsed_annotations != NULL, "invariant");
_field_info = new FieldLayoutInfo();
layout_fields(cp, _fac, _parsed_annotations, _field_info, CHECK);
if (UseNewFieldLayout) {
FieldLayoutBuilder lb(class_name(), super_klass(), _cp, _fields,
_parsed_annotations->is_contended(), _field_info);
lb.build_layout();
} else {
layout_fields(cp, _fac, _parsed_annotations, _field_info, CHECK);
}
// Compute reference type
_rt = (NULL ==_super_klass) ? REF_NONE : _super_klass->reference_type();

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -28,6 +28,7 @@
#include "memory/referenceType.hpp"
#include "oops/annotations.hpp"
#include "oops/constantPool.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/typeArrayOop.hpp"
#include "utilities/accessFlags.hpp"
@ -45,17 +46,46 @@ class InstanceKlass;
class RecordComponent;
class Symbol;
class TempNewSymbol;
class FieldLayoutBuilder;
// Utility to collect and compact oop maps during layout
class OopMapBlocksBuilder : public ResourceObj {
public:
OopMapBlock* _nonstatic_oop_maps;
unsigned int _nonstatic_oop_map_count;
unsigned int _max_nonstatic_oop_maps;
OopMapBlocksBuilder(unsigned int max_blocks);
OopMapBlock* last_oop_map() const;
void initialize_inherited_blocks(OopMapBlock* blocks, unsigned int nof_blocks);
void add(int offset, int count);
void copy(OopMapBlock* dst);
void compact();
void print_on(outputStream* st) const;
void print_value_on(outputStream* st) const;
};
// Values needed for oopmap and InstanceKlass creation
class FieldLayoutInfo : public ResourceObj {
public:
OopMapBlocksBuilder* oop_map_blocks;
int _instance_size;
int _nonstatic_field_size;
int _static_field_size;
bool _has_nonstatic_fields;
};
// Parser for .class files
//
// The bytes describing the class file structure are read from a Stream object
class ClassFileParser {
friend class FieldLayoutBuilder;
friend class FieldLayout;
class ClassAnnotationCollector;
class FieldAllocationCount;
class FieldAnnotationCollector;
class FieldLayoutInfo;
class ClassAnnotationCollector;
class FieldAllocationCount;
class FieldAnnotationCollector;
public:
// The ClassFileParser has an associated "publicity" level
@ -161,6 +191,7 @@ class ClassFileParser {
bool _has_nonstatic_concrete_methods;
bool _declares_nonstatic_concrete_methods;
bool _has_final_method;
bool _has_contended_fields;
// precomputed flags
bool _has_finalizer;

@ -0,0 +1,780 @@
/*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "jvm.h"
#include "classfile/classFileParser.hpp"
#include "classfile/fieldLayoutBuilder.hpp"
#include "memory/resourceArea.hpp"
#include "oops/array.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/klass.inline.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
_next_block(NULL),
_prev_block(NULL),
_kind(kind),
_offset(-1),
_alignment(1),
_size(size),
_field_index(-1),
_is_reference(false) {
assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
"Otherwise, should use the constructor with a field index argument");
assert(size > 0, "Sanity check");
}
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
_next_block(NULL),
_prev_block(NULL),
_kind(kind),
_offset(-1),
_alignment(alignment),
_size(size),
_field_index(index),
_is_reference(is_reference) {
assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
"Other kind do not have a field index");
assert(size > 0, "Sanity check");
assert(alignment > 0, "Sanity check");
}
bool LayoutRawBlock::fit(int size, int alignment) {
int adjustment = 0;
if ((_offset % alignment) != 0) {
adjustment = alignment - (_offset % alignment);
}
return _size >= size + adjustment;
}
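// Worked example (editor's illustration): for an EMPTY block with _offset == 10 and
// _size == 8, fit(4 /* size */, 4 /* alignment */) computes adjustment == 2 (to reach
// offset 12) and returns true because 8 >= 4 + 2; with _size == 5 it would return false.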
FieldGroup::FieldGroup(int contended_group) :
_next(NULL),
_primitive_fields(NULL),
_oop_fields(NULL),
_contended_group(contended_group), // -1 means no contended group, 0 means default contended group
_oop_count(0) {}
void FieldGroup::add_primitive_field(AllFieldStream fs, BasicType type) {
int size = type2aelembytes(type);
LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
if (_primitive_fields == NULL) {
_primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
}
_primitive_fields->append(block);
}
void FieldGroup::add_oop_field(AllFieldStream fs) {
int size = type2aelembytes(T_OBJECT);
LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
if (_oop_fields == NULL) {
_oop_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
}
_oop_fields->append(block);
_oop_count++;
}
void FieldGroup::sort_by_size() {
if (_primitive_fields != NULL) {
_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
}
}
FieldLayout::FieldLayout(Array<u2>* fields, ConstantPool* cp) :
_fields(fields),
_cp(cp),
_blocks(NULL),
_start(_blocks),
_last(_blocks) {}
void FieldLayout::initialize_static_layout() {
_blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
_blocks->set_offset(0);
_last = _blocks;
_start = _blocks;
// Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
// during bootstrapping, the size of java.lang.Class is still not known when the layout
// of static fields is computed. Field offsets are fixed later when the size is known
// (see java_lang_Class::fixup_mirror())
if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
_blocks->set_offset(0);
}
}
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
if (super_klass == NULL) {
_blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
_blocks->set_offset(0);
_last = _blocks;
_start = _blocks;
insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
} else {
reconstruct_layout(super_klass);
fill_holes(super_klass);
if (UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) {
_start = _blocks; // Setting _start to _blocks instead of _last would allow subclasses
// to allocate fields in empty slots of their super classes
} else {
_start = _last;
}
}
}
LayoutRawBlock* FieldLayout::first_field_block() {
LayoutRawBlock* block = _start;
while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
&& block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {
block = block->next_block();
}
return block;
}
// Insert a set of fields into a layout using a best-fit strategy.
// For each field, search for the smallest empty slot able to fit the field
// (satisfying both size and alignment requirements); if none is found,
// the field is added at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument.
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
if (list == NULL) return;
if (start == NULL) start = this->_start;
bool last_search_success = false;
int last_size = 0;
int last_alignment = 0;
for (int i = 0; i < list->length(); i ++) {
LayoutRawBlock* b = list->at(i);
LayoutRawBlock* cursor = NULL;
LayoutRawBlock* candidate = NULL;
// if start is the last block, just append the field
if (start == last_block()) {
candidate = last_block();
}
// Before iterating over the layout to find an empty slot fitting the field's requirements,
// check if the previous field had the same requirements and if the search for a fitting slot
// was successful. If the requirements were the same but the search failed, a new search will
// fail the same way, so just append the field at the end of the layout.
else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
candidate = last_block();
} else {
// Iterate over the layout to find an empty slot fitting the field's requirements
last_size = b->size();
last_alignment = b->alignment();
cursor = last_block()->prev_block();
assert(cursor != NULL, "Sanity check");
last_search_success = true;
while (cursor != start) {
if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
if (candidate == NULL || cursor->size() < candidate->size()) {
candidate = cursor;
}
}
cursor = cursor->prev_block();
}
if (candidate == NULL) {
candidate = last_block();
last_search_success = false;
}
assert(candidate != NULL, "Candidate must not be null");
assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
}
insert_field_block(candidate, b);
}
}
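// Editor's illustration of the best-fit search above (hypothetical layout): if the empty
// blocks between _start and the trailing EMPTY block amount to a single 2-byte gap at
// offset 10, a 4-byte field with alignment 4 rejects that gap (fit(4, 4) would need 6 bytes
// there) and is appended to the trailing block, while a later 2-byte field with alignment 2
// can still reuse the gap at offset 10.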
// Used for classes with hard coded field offsets: insert a field at the specified offset.
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
assert(block != NULL, "Sanity check");
block->set_offset(offset);
if (start == NULL) {
start = this->_start;
}
LayoutRawBlock* slot = start;
while (slot != NULL) {
if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
slot == _last){
assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
if (slot->offset() < block->offset()) {
int adjustment = block->offset() - slot->offset();
LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
insert(slot, adj);
}
insert(slot, block);
if (slot->size() == 0) {
remove(slot);
}
FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
return;
}
slot = slot->next_block();
}
fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
if (list == NULL) return;
if (start == NULL) {
start = _start;
}
// This code assumes that if the first block is well aligned, the following
// blocks would naturally be well aligned (no need for adjustment)
int size = 0;
for (int i = 0; i < list->length(); i++) {
size += list->at(i)->size();
}
LayoutRawBlock* candidate = NULL;
if (start == last_block()) {
candidate = last_block();
} else {
LayoutRawBlock* first = list->at(0);
candidate = last_block()->prev_block();
while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
if (candidate == start) {
candidate = last_block();
break;
}
candidate = candidate->prev_block();
}
assert(candidate != NULL, "Candidate must not be null");
assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
}
for (int i = 0; i < list->length(); i++) {
LayoutRawBlock* b = list->at(i);
insert_field_block(candidate, b);
assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
}
}
LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
if (slot->offset() % block->alignment() != 0) {
int adjustment = block->alignment() - (slot->offset() % block->alignment());
LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
insert(slot, adj);
}
insert(slot, block);
if (slot->size() == 0) {
remove(slot);
}
FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
return block;
}
void FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
while (ik != NULL) {
for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
BasicType type = Signature::basic_type(fs.signature());
// Static fields are not part of the instance layout, skip them
if (fs.access_flags().is_static()) continue;
int size = type2aelembytes(type);
// INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
block->set_offset(fs.offset());
all_fields->append(block);
}
ik = ik->super() == NULL ? NULL : InstanceKlass::cast(ik->super());
}
all_fields->sort(LayoutRawBlock::compare_offset);
_blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
_blocks->set_offset(0);
_last = _blocks;
for(int i = 0; i < all_fields->length(); i++) {
LayoutRawBlock* b = all_fields->at(i);
_last->set_next_block(b);
b->set_prev_block(_last);
_last = b;
}
_start = _blocks;
}
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so that the regular field insertion methods work.
// This method handles classes with @Contended annotations differently,
// inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
// fields from interfering with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
assert(_blocks != NULL, "Sanity check");
assert(_blocks->offset() == 0, "first block must be at offset zero");
LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
LayoutRawBlock* b = _blocks;
while (b->next_block() != NULL) {
if (b->next_block()->offset() > (b->offset() + b->size())) {
int size = b->next_block()->offset() - (b->offset() + b->size());
LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
empty->set_offset(b->offset() + b->size());
empty->set_next_block(b->next_block());
b->next_block()->set_prev_block(empty);
b->set_next_block(empty);
empty->set_prev_block(b);
}
b = b->next_block();
}
assert(b->next_block() == NULL, "Invariant at this point");
assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
// If the super class has @Contended annotation, a padding block is
// inserted at the end to ensure that fields from the subclasses won't share
// the cache line of the last field of the contended class
if (super_klass->has_contended_annotations()) {
LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
p->set_offset(b->offset() + b->size());
b->set_next_block(p);
p->set_prev_block(b);
b = p;
}
if (!UseEmptySlotsInSupers) {
// Add an empty slot to align fields of the subclass on a heapOopSize boundary
// in order to emulate the behavior of the previous algorithm
int align = (b->offset() + b->size()) % heapOopSize;
if (align != 0) {
int sz = heapOopSize - align;
LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
p->set_offset(b->offset() + b->size());
b->set_next_block(p);
p->set_prev_block(b);
b = p;
}
}
LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
last->set_offset(b->offset() + b->size());
assert(last->offset() > 0, "Sanity check");
b->set_next_block(last);
last->set_prev_block(b);
_last = last;
}
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
block->set_offset(slot->offset());
slot->set_offset(slot->offset() + block->size());
assert((slot->size() - block->size()) < slot->size(), "underflow checking");
assert(slot->size() - block->size() >= 0, "no negative size allowed");
slot->set_size(slot->size() - block->size());
block->set_prev_block(slot->prev_block());
block->set_next_block(slot);
slot->set_prev_block(block);
if (block->prev_block() != NULL) {
block->prev_block()->set_next_block(block);
}
if (_blocks == slot) {
_blocks = block;
}
return block;
}
void FieldLayout::remove(LayoutRawBlock* block) {
assert(block != NULL, "Sanity check");
assert(block != _last, "Sanity check");
if (_blocks == block) {
_blocks = block->next_block();
if (_blocks != NULL) {
_blocks->set_prev_block(NULL);
}
} else {
assert(block->prev_block() != NULL, "_prev should be set for non-head blocks");
block->prev_block()->set_next_block(block->next_block());
block->next_block()->set_prev_block(block->prev_block());
}
if (block == _start) {
_start = block->prev_block();
}
}
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
ResourceMark rm;
LayoutRawBlock* b = _blocks;
while(b != _last) {
switch(b->kind()) {
case LayoutRawBlock::REGULAR: {
FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
output->print_cr(" @%d \"%s\" %s %d/%d %s",
b->offset(),
fi->name(_cp)->as_C_string(),
fi->signature(_cp)->as_C_string(),
b->size(),
b->alignment(),
"REGULAR");
break;
}
case LayoutRawBlock::FLATTENED: {
FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
output->print_cr(" @%d \"%s\" %s %d/%d %s",
b->offset(),
fi->name(_cp)->as_C_string(),
fi->signature(_cp)->as_C_string(),
b->size(),
b->alignment(),
"FLATTENED");
break;
}
case LayoutRawBlock::RESERVED: {
output->print_cr(" @%d %d/- %s",
b->offset(),
b->size(),
"RESERVED");
break;
}
case LayoutRawBlock::INHERITED: {
assert(!is_static, "Static fields are not inherited in layouts");
assert(super != NULL, "super klass must be provided to retrieve inherited fields info");
bool found = false;
const InstanceKlass* ik = super;
while (!found && ik != NULL) {
for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
if (fs.offset() == b->offset()) {
output->print_cr(" @%d \"%s\" %s %d/%d %s",
b->offset(),
fs.name()->as_C_string(),
fs.signature()->as_C_string(),
b->size(),
b->size(), // so far, alignment constraint == size, will change with Valhalla
"INHERITED");
found = true;
break;
}
}
ik = ik->java_super();
}
break;
}
case LayoutRawBlock::EMPTY:
output->print_cr(" @%d %d/1 %s",
b->offset(),
b->size(),
"EMPTY");
break;
case LayoutRawBlock::PADDING:
output->print_cr(" @%d %d/1 %s",
b->offset(),
b->size(),
"PADDING");
break;
}
b = b->next_block();
}
}
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
Array<u2>* fields, bool is_contended, FieldLayoutInfo* info) :
_classname(classname),
_super_klass(super_klass),
_constant_pool(constant_pool),
_fields(fields),
_info(info),
_root_group(NULL),
_contended_groups(GrowableArray<FieldGroup*>(8)),
_static_fields(NULL),
_layout(NULL),
_static_layout(NULL),
_nonstatic_oopmap_count(0),
_alignment(-1),
_has_nonstatic_fields(false),
_is_contended(is_contended) {}
FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
assert(g > 0, "must only be called for named contended groups");
FieldGroup* fg = NULL;
for (int i = 0; i < _contended_groups.length(); i++) {
fg = _contended_groups.at(i);
if (fg->contended_group() == g) return fg;
}
fg = new FieldGroup(g);
_contended_groups.append(fg);
return fg;
}
void FieldLayoutBuilder::prologue() {
_layout = new FieldLayout(_fields, _constant_pool);
const InstanceKlass* super_klass = _super_klass;
_layout->initialize_instance_layout(super_klass);
if (super_klass != NULL) {
_has_nonstatic_fields = super_klass->has_nonstatic_fields();
}
_static_layout = new FieldLayout(_fields, _constant_pool);
_static_layout->initialize_static_layout();
_static_fields = new FieldGroup();
_root_group = new FieldGroup();
}
// Field sorting for regular classes:
// - fields are sorted in static and non-static fields
// - non-static fields are also sorted according to their contention group
// (support of the @Contended annotation)
// - @Contended annotation is ignored for static fields
void FieldLayoutBuilder::regular_field_sorting() {
for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
FieldGroup* group = NULL;
if (fs.access_flags().is_static()) {
group = _static_fields;
} else {
_has_nonstatic_fields = true;
if (fs.is_contended()) {
int g = fs.contended_group();
if (g == 0) {
group = new FieldGroup(true);
_contended_groups.append(group);
} else {
group = get_or_create_contended_group(g);
}
} else {
group = _root_group;
}
}
assert(group != NULL, "invariant");
BasicType type = Signature::basic_type(fs.signature());
switch(type) {
case T_BYTE:
case T_CHAR:
case T_DOUBLE:
case T_FLOAT:
case T_INT:
case T_LONG:
case T_SHORT:
case T_BOOLEAN:
group->add_primitive_field(fs, type);
break;
case T_OBJECT:
case T_ARRAY:
if (group != _static_fields) _nonstatic_oopmap_count++;
group->add_oop_field(fs);
break;
default:
fatal("Something wrong?");
}
}
_root_group->sort_by_size();
_static_fields->sort_by_size();
if (!_contended_groups.is_empty()) {
for (int i = 0; i < _contended_groups.length(); i++) {
_contended_groups.at(i)->sort_by_size();
}
}
}
void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
if (ContendedPaddingWidth > 0) {
LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
_layout->insert(slot, padding);
}
}
// Computation of regular classes layout is an evolution of the previous default layout
// (FieldAllocationStyle 1):
// - primitive fields are allocated first (from the biggest to the smallest)
// - then oop fields are allocated, either in existing gaps or at the end of
// the layout
void FieldLayoutBuilder::compute_regular_layout() {
bool need_tail_padding = false;
prologue();
regular_field_sorting();
if (_is_contended) {
_layout->set_start(_layout->last_block());
// insertion is currently easy because the current strategy doesn't try to fill holes
// in super classes' layouts => the _start block is consequently the last block
insert_contended_padding(_layout->start());
need_tail_padding = true;
}
_layout->add(_root_group->primitive_fields());
_layout->add(_root_group->oop_fields());
if (!_contended_groups.is_empty()) {
for (int i = 0; i < _contended_groups.length(); i++) {
FieldGroup* cg = _contended_groups.at(i);
LayoutRawBlock* start = _layout->last_block();
insert_contended_padding(start);
_layout->add(cg->primitive_fields(), start);
_layout->add(cg->oop_fields(), start);
need_tail_padding = true;
}
}
if (need_tail_padding) {
insert_contended_padding(_layout->last_block());
}
_static_layout->add_contiguously(this->_static_fields->oop_fields());
_static_layout->add(this->_static_fields->primitive_fields());
epilogue();
}
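// Editor's illustration of the strategy above (hypothetical class, not from this change):
// for
//   class C { byte b; long l; int i; Object o; }
// regular_field_sorting() places b, l and i in the root group's primitive list and o in its
// oop list; compute_regular_layout() then allocates the primitives by decreasing size
// (l, then i, then b) and finally places o either in a gap left by alignment padding or at
// the end of the layout.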
// Compute layout of the java/lang/ref/Reference class according
// to the hard coded offsets of its fields
void FieldLayoutBuilder::compute_java_lang_ref_Reference_layout() {
prologue();
regular_field_sorting();
assert(_contended_groups.is_empty(), "java.lang.ref.Reference has no @Contended annotations");
assert(_root_group->primitive_fields() == NULL, "java.lang.ref.Reference has no nonstatic primitive fields");
int field_count = 0;
int offset = -1;
for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
LayoutRawBlock* b = _root_group->oop_fields()->at(i);
FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
if (fi->name(_constant_pool)->equals("referent")) {
offset = java_lang_ref_Reference::referent_offset;
} else if (fi->name(_constant_pool)->equals("queue")) {
offset = java_lang_ref_Reference::queue_offset;
} else if (fi->name(_constant_pool)->equals("next")) {
offset = java_lang_ref_Reference::next_offset;
} else if (fi->name(_constant_pool)->equals("discovered")) {
offset = java_lang_ref_Reference::discovered_offset;
}
assert(offset != -1, "Unknown field");
_layout->add_field_at_offset(b, offset);
field_count++;
}
assert(field_count == 4, "Wrong number of fields in java.lang.ref.Reference");
_static_layout->add_contiguously(this->_static_fields->oop_fields());
_static_layout->add(this->_static_fields->primitive_fields());
epilogue();
}
// Compute layout of the boxing classes according
// to the hard coded offsets of their fields
void FieldLayoutBuilder::compute_boxing_class_layout() {
prologue();
regular_field_sorting();
assert(_contended_groups.is_empty(), "Boxing classes have no @Contended annotations");
assert(_root_group->oop_fields() == NULL, "Boxing classes have no nonstatic oop fields");
int field_count = 0;
int offset = -1;
for (int i = 0; i < _root_group->primitive_fields()->length(); i++) {
LayoutRawBlock* b = _root_group->primitive_fields()->at(i);
FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
assert(fi->name(_constant_pool)->equals("value"), "Boxing classes have a single nonstatic field named 'value'");
BasicType type = Signature::basic_type(fi->signature(_constant_pool));
offset = java_lang_boxing_object::value_offset_in_bytes(type);
assert(offset != -1, "Unknown field");
_layout->add_field_at_offset(b, offset);
field_count++;
}
assert(field_count == 1, "Wrong number of fields for a boxing class");
_static_layout->add_contiguously(this->_static_fields->oop_fields());
_static_layout->add(this->_static_fields->primitive_fields());
epilogue();
}
void FieldLayoutBuilder::epilogue() {
// Computing oopmaps
int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count();
int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
OopMapBlocksBuilder* nonstatic_oop_maps =
new OopMapBlocksBuilder(max_oop_map_count);
if (super_oop_map_count > 0) {
nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
_super_klass->nonstatic_oop_map_count());
}
if (_root_group->oop_fields() != NULL) {
for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
LayoutRawBlock* b = _root_group->oop_fields()->at(i);
nonstatic_oop_maps->add(b->offset(), 1);
}
}
if (!_contended_groups.is_empty()) {
for (int i = 0; i < _contended_groups.length(); i++) {
FieldGroup* cg = _contended_groups.at(i);
if (cg->oop_count() > 0) {
assert(cg->oop_fields() != NULL && cg->oop_fields()->at(0) != NULL, "oop_count > 0 but no oop fields found");
nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
}
}
}
nonstatic_oop_maps->compact();
int instance_end = align_up(_layout->last_block()->offset(), wordSize);
int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
int static_fields_size = (static_fields_end -
InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
// Pass back information needed for InstanceKlass creation
_info->oop_map_blocks = nonstatic_oop_maps;
_info->_instance_size = align_object_size(instance_end / wordSize);
_info->_static_field_size = static_fields_size;
_info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
_info->_has_nonstatic_fields = _has_nonstatic_fields;
if (PrintFieldLayout) {
ResourceMark rm;
tty->print_cr("Layout of class %s", _classname->as_C_string());
tty->print_cr("Instance fields:");
_layout->print(tty, false, _super_klass);
tty->print_cr("Static fields:");
_static_layout->print(tty, true, NULL);
tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
tty->print_cr("---");
}
}
void FieldLayoutBuilder::build_layout() {
if (_classname == vmSymbols::java_lang_ref_Reference()) {
compute_java_lang_ref_Reference_layout();
} else if (_classname == vmSymbols::java_lang_Boolean() ||
_classname == vmSymbols::java_lang_Character() ||
_classname == vmSymbols::java_lang_Float() ||
_classname == vmSymbols::java_lang_Double() ||
_classname == vmSymbols::java_lang_Byte() ||
_classname == vmSymbols::java_lang_Short() ||
_classname == vmSymbols::java_lang_Integer() ||
_classname == vmSymbols::java_lang_Long()) {
compute_boxing_class_layout();
} else {
compute_regular_layout();
}
}

@ -0,0 +1,267 @@
/*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_CLASSFILE_FIELDLAYOUTBUILDER_HPP
#define SHARE_CLASSFILE_FIELDLAYOUTBUILDER_HPP
#include "classfile/classFileParser.hpp"
#include "classfile/classLoaderData.hpp"
#include "memory/allocation.hpp"
#include "oops/fieldStreams.hpp"
#include "utilities/growableArray.hpp"
// Classes below are used to compute the field layout of classes.
// A LayoutRawBlock describes an element of a layout.
// Each field is represented by a LayoutRawBlock.
// LayoutRawBlocks can also represent elements injected by the JVM:
// padding, empty blocks, inherited fields, etc.
// All LayoutRawBlocks must have a size and an alignment. The size is the
// exact size of the field expressed in bytes. The alignment is
// the alignment constraint of the field (1 for byte, 2 for short,
// 4 for int, 8 for long, etc.)
//
// LayoutRawBlocks are designed to be used in two data structures:
// - a linked list in a layout (using _next_block, _prev_block)
// - a GrowableArray in field group (the growable array contains pointers to LayoutRawBlocks)
//
// next/prev pointers are included in the LayoutRawBlock class to reduce
// the number of allocations required during the computation of a layout.
//
class LayoutRawBlock : public ResourceObj {
public:
// Some code relies on the order of values below.
enum Kind {
EMPTY, // empty slot, space is taken from this to allocate fields
RESERVED, // reserved for JVM usage (for instance object header)
PADDING, // padding (because of alignment constraints or @Contended)
REGULAR, // primitive or oop field (including non-flattened inline fields)
FLATTENED, // flattened field
INHERITED // field(s) inherited from super classes
};
private:
LayoutRawBlock* _next_block;
LayoutRawBlock* _prev_block;
Kind _kind;
int _offset;
int _alignment;
int _size;
int _field_index;
bool _is_reference;
public:
LayoutRawBlock(Kind kind, int size);
LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference = false);
LayoutRawBlock* next_block() const { return _next_block; }
void set_next_block(LayoutRawBlock* next) { _next_block = next; }
LayoutRawBlock* prev_block() const { return _prev_block; }
void set_prev_block(LayoutRawBlock* prev) { _prev_block = prev; }
Kind kind() const { return _kind; }
int offset() const {
assert(_offset >= 0, "Must be initialized");
return _offset;
}
void set_offset(int offset) { _offset = offset; }
int alignment() const { return _alignment; }
int size() const { return _size; }
void set_size(int size) { _size = size; }
int field_index() const {
assert(_field_index != -1, "Must be initialized");
return _field_index;
}
bool is_reference() const { return _is_reference; }
bool fit(int size, int alignment);
static int compare_offset(LayoutRawBlock** x, LayoutRawBlock** y) { return (*x)->offset() - (*y)->offset(); }
// compare_size_inverted() returns the opposite of a regular compare method in order to
// sort fields in decreasing order.
// Note: with inline types, the comparison should include the alignment constraint if sizes are equal
static int compare_size_inverted(LayoutRawBlock** x, LayoutRawBlock** y) {
#ifdef _WINDOWS
// qsort() on Windows reverses the order of fields with the same size;
// the extension of the comparison function below preserves this order
int diff = (*y)->size() - (*x)->size();
if (diff == 0) {
diff = (*x)->field_index() - (*y)->field_index();
}
return diff;
#else
return (*y)->size() - (*x)->size();
#endif // _WINDOWS
}
};
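// Editor's note: FieldGroup::sort_by_size() sorts its lists with
//   _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
// so larger fields come first; on Windows the extra tie-break on field_index() makes the
// ordering of equally-sized fields deterministic (ascending field index).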
// A FieldGroup represents a set of fields that have to be allocated together;
// this is how the @Contended annotation is supported.
// Inside a FieldGroup, fields are sorted based on their kind: primitive,
// oop, or flattened.
//
class FieldGroup : public ResourceObj {
private:
FieldGroup* _next;
GrowableArray<LayoutRawBlock*>* _primitive_fields;
GrowableArray<LayoutRawBlock*>* _oop_fields;
int _contended_group;
int _oop_count;
static const int INITIAL_LIST_SIZE = 16;
public:
FieldGroup(int contended_group = -1);
FieldGroup* next() const { return _next; }
void set_next(FieldGroup* next) { _next = next; }
GrowableArray<LayoutRawBlock*>* primitive_fields() const { return _primitive_fields; }
GrowableArray<LayoutRawBlock*>* oop_fields() const { return _oop_fields; }
int contended_group() const { return _contended_group; }
int oop_count() const { return _oop_count; }
void add_primitive_field(AllFieldStream fs, BasicType type);
void add_oop_field(AllFieldStream fs);
void sort_by_size();
};
// The FieldLayout class represents a set of fields organized
// in a layout.
// An instance of FieldLayout can either represent the layout
// of non-static fields (used in an instance object) or the
// layout of static fields (to be included in the class mirror).
//
// _blocks is a pointer to a list of LayoutRawBlocks ordered by increasing
// offsets.
// _start points to the LayoutRawBlock with the first offset that can
// be used to allocate fields of the current class
// _last points to the last LayoutRawBlock of the list. In order to
// simplify the code, the LayoutRawBlock list always ends with an
// EMPTY block (the kind of LayoutRawBlock from which space is taken
// to allocate fields) with a size big enough to satisfy all
// field allocations.
//
class FieldLayout : public ResourceObj {
private:
Array<u2>* _fields;
ConstantPool* _cp;
LayoutRawBlock* _blocks; // the layout being computed
LayoutRawBlock* _start; // points to the first block where a field can be inserted
LayoutRawBlock* _last; // points to the last block of the layout (big empty block)
public:
FieldLayout(Array<u2>* fields, ConstantPool* cp);
void initialize_static_layout();
void initialize_instance_layout(const InstanceKlass* ik);
LayoutRawBlock* first_empty_block() {
LayoutRawBlock* block = _start;
while (block->kind() != LayoutRawBlock::EMPTY) {
block = block->next_block();
}
return block;
}
LayoutRawBlock* start() { return _start; }
void set_start(LayoutRawBlock* start) { _start = start; }
LayoutRawBlock* last_block() { return _last; }
LayoutRawBlock* first_field_block();
void add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start = NULL);
void add_field_at_offset(LayoutRawBlock* blocks, int offset, LayoutRawBlock* start = NULL);
void add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start = NULL);
LayoutRawBlock* insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block);
void reconstruct_layout(const InstanceKlass* ik);
void fill_holes(const InstanceKlass* ik);
LayoutRawBlock* insert(LayoutRawBlock* slot, LayoutRawBlock* block);
void remove(LayoutRawBlock* block);
void print(outputStream* output, bool is_static, const InstanceKlass* super);
};
// FieldLayoutBuilder is the main entry point for layout computation.
// This class has three methods to generate layout: one for regular classes
// and two for classes with hard coded offsets (java.lang.ref.Reference
// and the boxing classes). The rationale for having multiple methods
// is that each kind of class has a different set of goals regarding
// its layout, so instead of mixing several layout strategies into a
// single method, each kind has its own method (see comments below
// for more details about the allocation strategies).
//
// Computing the layout of a class always goes through 4 steps:
// 1 - Prologue: preparation of data structure and gathering of
// layout information inherited from super classes
// 2 - Field sorting: fields are sorted according to their
// kind (oop, primitive, inline class) and their contention
// annotation (if any)
// 3 - Layout is computed from the set of lists generated during
// step 2
// 4 - Epilogue: oopmaps are generated, layout information is
// prepared so other VM components can use it (instance size,
// static field size, non-static field size, etc.)
//
// Steps 1 and 4 are common to all layout computations. Steps 2 and 3
// can vary with the allocation strategy.
//
class FieldLayoutBuilder : public ResourceObj {
private:
const Symbol* _classname;
const InstanceKlass* _super_klass;
ConstantPool* _constant_pool;
Array<u2>* _fields;
FieldLayoutInfo* _info;
FieldGroup* _root_group;
GrowableArray<FieldGroup*> _contended_groups;
FieldGroup* _static_fields;
FieldLayout* _layout;
FieldLayout* _static_layout;
int _nonstatic_oopmap_count;
int _alignment;
bool _has_nonstatic_fields;
bool _is_contended; // is a contended class?
public:
FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
Array<u2>* fields, bool is_contended, FieldLayoutInfo* info);
int get_alignment() {
assert(_alignment != -1, "Uninitialized");
return _alignment;
}
void build_layout();
void compute_regular_layout();
void compute_java_lang_ref_Reference_layout();
void compute_boxing_class_layout();
void insert_contended_padding(LayoutRawBlock* slot);
private:
void prologue();
void epilogue();
void regular_field_sorting();
FieldGroup* get_or_create_contended_group(int g);
};
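A rough sketch, for a regular class, of how the four steps above map onto the builder's methods; the real compute_regular_layout() also handles @Contended groups, padding, and NULL checks, so the body below is illustrative only:

void FieldLayoutBuilder::compute_regular_layout() {
  prologue();                                       // step 1: prepare layouts, gather superclass information
  regular_field_sorting();                          // step 2: split fields into oop/primitive/contended groups
  // step 3: allocate the sorted groups into the instance and static layouts
  _layout->add(_root_group->primitive_fields());
  _layout->add(_root_group->oop_fields());
  _static_layout->add(_static_fields->primitive_fields());
  _static_layout->add(_static_fields->oop_fields());
  epilogue();                                       // step 4: generate oop maps and publish sizes through _info
}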
#endif // SHARE_CLASSFILE_FIELDLAYOUTBUILDER_HPP

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -157,7 +157,7 @@
nonstatic_field(InstanceKlass, _constants, ConstantPool*) \
nonstatic_field(InstanceKlass, _source_file_name_index, u2) \
nonstatic_field(InstanceKlass, _init_state, u1) \
nonstatic_field(InstanceKlass, _misc_flags, u2) \
nonstatic_field(InstanceKlass, _misc_flags, u4) \
nonstatic_field(InstanceKlass, _annotations, Annotations*) \
\
volatile_nonstatic_field(JavaFrameAnchor, _last_Java_sp, intptr_t*) \

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -47,7 +47,6 @@ class FieldStreamBase : public StackObj {
fieldDescriptor _fd_buf;
FieldInfo* field() const { return FieldInfo::from_field_array(_fields, _index); }
InstanceKlass* field_holder() const { return _constants->pool_holder(); }
int init_generic_signature_start_slot() {
int length = _fields->length();
@ -87,6 +86,7 @@ class FieldStreamBase : public StackObj {
// accessors
int index() const { return _index; }
InstanceKlass* field_holder() const { return _constants->pool_holder(); }
void next() {
if (access_flags().field_has_generic_signature()) {

@ -1399,6 +1399,10 @@ void InstanceKlass::mask_for(const methodHandle& method, int bci,
oop_map_cache->lookup(method, bci, entry_for);
}
bool InstanceKlass::contains_field_offset(int offset) {
fieldDescriptor fd;
return find_field_from_offset(offset, false, &fd);
}
bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
for (JavaFieldStream fs(this); !fs.done(); fs.next()) {

@ -103,12 +103,28 @@ class OopMapBlock {
uint count() const { return _count; }
void set_count(uint count) { _count = count; }
void increment_count(int diff) { _count += diff; }
int offset_span() const { return _count * heapOopSize; }
int end_offset() const {
return offset() + offset_span();
}
bool is_contiguous(int another_offset) const {
return another_offset == end_offset();
}
// sizeof(OopMapBlock) in words.
static const int size_in_words() {
return align_up((int)sizeof(OopMapBlock), wordSize) >>
LogBytesPerWord;
}
static int compare_offset(const OopMapBlock* a, const OopMapBlock* b) {
return a->offset() - b->offset();
}
private:
int _offset;
uint _count;
@ -212,7 +228,6 @@ class InstanceKlass: public Klass {
// _is_marked_dependent can be set concurrently, thus cannot be part of the
// _misc_flags.
bool _is_marked_dependent; // used for marking during flushing and deoptimization
bool _is_being_redefined; // used for locking redefinition
// The low two bits of _misc_flags contain the kind field.
// This can be used to quickly discriminate among the four kinds of
@ -243,12 +258,14 @@ class InstanceKlass: public Klass {
_misc_is_shared_boot_class = 1 << 12, // defining class loader is boot class loader
_misc_is_shared_platform_class = 1 << 13, // defining class loader is platform class loader
_misc_is_shared_app_class = 1 << 14, // defining class loader is app class loader
_misc_has_resolved_methods = 1 << 15 // resolved methods table entries added for this class
_misc_has_resolved_methods = 1 << 15, // resolved methods table entries added for this class
_misc_is_being_redefined = 1 << 16, // used for locking redefinition
_misc_has_contended_annotations = 1 << 17 // has @Contended annotation
};
u2 loader_type_bits() {
return _misc_is_shared_boot_class|_misc_is_shared_platform_class|_misc_is_shared_app_class;
}
u2 _misc_flags;
u4 _misc_flags;
u2 _minor_version; // minor version number of class file
u2 _major_version; // major version number of class file
Thread* _init_thread; // Pointer to current thread doing initialization (to handle recursive initialization)
@ -571,9 +588,7 @@ public:
Klass* find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const;
// find a non-static or static field given its offset within the class.
bool contains_field_offset(int offset) {
return instanceOopDesc::contains_field_offset(offset, nonstatic_field_size());
}
bool contains_field_offset(int offset);
bool find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const;
bool find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const;
@ -735,10 +750,29 @@ public:
_nonstatic_oop_map_size = words;
}
bool has_contended_annotations() const {
return ((_misc_flags & _misc_has_contended_annotations) != 0);
}
void set_has_contended_annotations(bool value) {
if (value) {
_misc_flags |= _misc_has_contended_annotations;
} else {
_misc_flags &= ~_misc_has_contended_annotations;
}
}
#if INCLUDE_JVMTI
// Redefinition locking. Class can only be redefined by one thread at a time.
bool is_being_redefined() const { return _is_being_redefined; }
void set_is_being_redefined(bool value) { _is_being_redefined = value; }
bool is_being_redefined() const {
return ((_misc_flags & _misc_is_being_redefined) != 0);
}
void set_is_being_redefined(bool value) {
if (value) {
_misc_flags |= _misc_is_being_redefined;
} else {
_misc_flags &= ~_misc_is_being_redefined;
}
}
// RedefineClasses() support for previous versions:
void add_previous_version(InstanceKlass* ik, int emcp_method_count);

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -43,12 +43,6 @@ class instanceOopDesc : public oopDesc {
klass_gap_offset_in_bytes() :
sizeof(instanceOopDesc);
}
static bool contains_field_offset(int offset, int nonstatic_field_size) {
int base_in_bytes = base_offset_in_bytes();
return (offset >= base_in_bytes &&
(offset-base_in_bytes) < nonstatic_field_size * heapOopSize);
}
};
#endif // SHARE_OOPS_INSTANCEOOP_HPP

@ -522,6 +522,7 @@ static SpecialFlag const special_jvm_flags[] = {
{ "AllowRedefinitionToAddDeleteMethods", JDK_Version::jdk(13), JDK_Version::undefined(), JDK_Version::undefined() },
{ "FlightRecorder", JDK_Version::jdk(13), JDK_Version::undefined(), JDK_Version::undefined() },
{ "MonitorBound", JDK_Version::jdk(14), JDK_Version::jdk(15), JDK_Version::jdk(16) },
{ "UseNewFieldLayout", JDK_Version::jdk(15), JDK_Version::jdk(16), JDK_Version::jdk(17) },
// --- Deprecated alias flags (see also aliased_jvm_flags) - sorted by obsolete_in then expired_in:
{ "DefaultMaxRAMFraction", JDK_Version::jdk(8), JDK_Version::undefined(), JDK_Version::undefined() },

@ -1156,18 +1156,18 @@ int compare(ReassignedField* left, ReassignedField* right) {
// Restore fields of an eliminated instance object using the same field order
// returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
if (klass->superklass() != NULL) {
svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal);
}
GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
ReassignedField field;
field._offset = fs.offset();
field._type = Signature::basic_type(fs.signature());
fields->append(field);
InstanceKlass* ik = klass;
while (ik != NULL) {
for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
ReassignedField field;
field._offset = fs.offset();
field._type = Signature::basic_type(fs.signature());
fields->append(field);
}
}
ik = ik->superklass();
}
fields->sort(compare);
for (int i = 0; i < fields->length(); i++) {

@ -2488,7 +2488,15 @@ const size_t minimumSymbolTableSize = 1024;
"Start flight recording with options")) \
\
experimental(bool, UseFastUnorderedTimeStamps, false, \
"Use platform unstable time where supported for timestamps only")
"Use platform unstable time where supported for timestamps only") \
\
product(bool, UseNewFieldLayout, true, \
"(Deprecated) Use new algorithm to compute field layouts") \
\
product(bool, UseEmptySlotsInSupers, true, \
"Allow allocating fields in empty slots of super-classes") \
\
// Interface macros
#define DECLARE_PRODUCT_FLAG(type, name, value, doc) extern "C" type name;

@ -235,7 +235,7 @@ typedef HashtableEntry<InstanceKlass*, mtClass> KlassHashtableEntry;
nonstatic_field(InstanceKlass, _static_oop_field_count, u2) \
nonstatic_field(InstanceKlass, _nonstatic_oop_map_size, int) \
nonstatic_field(InstanceKlass, _is_marked_dependent, bool) \
nonstatic_field(InstanceKlass, _misc_flags, u2) \
nonstatic_field(InstanceKlass, _misc_flags, u4) \
nonstatic_field(InstanceKlass, _minor_version, u2) \
nonstatic_field(InstanceKlass, _major_version, u2) \
nonstatic_field(InstanceKlass, _init_state, u1) \

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,8 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import jdk.vm.ci.common.JVMCIError;
@ -61,6 +63,7 @@ final class HotSpotResolvedObjectTypeImpl extends HotSpotResolvedJavaType implem
private static final HotSpotResolvedJavaField[] NO_FIELDS = new HotSpotResolvedJavaField[0];
private static final int METHOD_CACHE_ARRAY_CAPACITY = 8;
private static final SortByOffset fieldSortingMethod = new SortByOffset();
/**
* The Java class this type represents.
@ -708,6 +711,12 @@ final class HotSpotResolvedObjectTypeImpl extends HotSpotResolvedJavaType implem
}
}
static class SortByOffset implements Comparator<ResolvedJavaField> {
public int compare(ResolvedJavaField a, ResolvedJavaField b) {
return a.getOffset() - b.getOffset();
}
}
@Override
public ResolvedJavaField[] getInstanceFields(boolean includeSuperclasses) {
if (instanceFields == null) {
@ -727,8 +736,17 @@ final class HotSpotResolvedObjectTypeImpl extends HotSpotResolvedJavaType implem
// This class does not have any instance fields of its own.
return NO_FIELDS;
} else if (superClassFieldCount != 0) {
// Fields of the current class can be interleaved with fields of its super-classes
// but the array of fields to be returned must be sorted by increasing offset
// This code populates the array, then applies the sorting function
HotSpotResolvedJavaField[] result = new HotSpotResolvedJavaField[instanceFields.length - superClassFieldCount];
System.arraycopy(instanceFields, superClassFieldCount, result, 0, result.length);
int i = 0;
for (HotSpotResolvedJavaField f : instanceFields) {
if (f.getDeclaringClass() == this) {
result[i++] = f;
}
}
Arrays.sort(result, fieldSortingMethod);
return result;
} else {
// The super classes of this class do not have any instance fields.
@ -781,23 +799,19 @@ final class HotSpotResolvedObjectTypeImpl extends HotSpotResolvedJavaType implem
System.arraycopy(prepend, 0, result, 0, prependLength);
}
// Fields of the current class can be interleaved with fields of its super-classes
// but the array of fields to be returned must be sorted by increasing offset
// This code populates the array, then applies the sorting function
int resultIndex = prependLength;
for (int i = 0; i < index; ++i) {
FieldInfo field = new FieldInfo(i);
if (field.isStatic() == retrieveStaticFields) {
int offset = field.getOffset();
HotSpotResolvedJavaField resolvedJavaField = createField(field.getType(), offset, field.getAccessFlags(), i);
// Make sure the result is sorted by offset.
int j;
for (j = resultIndex - 1; j >= prependLength && result[j].getOffset() > offset; j--) {
result[j + 1] = result[j];
}
result[j + 1] = resolvedJavaField;
resultIndex++;
result[resultIndex++] = resolvedJavaField;
}
}
Arrays.sort(result, fieldSortingMethod);
return result;
}

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -101,7 +101,7 @@ class HotSpotVMConfig extends HotSpotVMConfigAccess {
final int instanceKlassConstantsOffset = getFieldOffset("InstanceKlass::_constants", Integer.class, "ConstantPool*");
final int instanceKlassFieldsOffset = getFieldOffset("InstanceKlass::_fields", Integer.class, "Array<u2>*");
final int instanceKlassAnnotationsOffset = getFieldOffset("InstanceKlass::_annotations", Integer.class, "Annotations*");
final int instanceKlassMiscFlagsOffset = getFieldOffset("InstanceKlass::_misc_flags", Integer.class, "u2");
final int instanceKlassMiscFlagsOffset = getFieldOffset("InstanceKlass::_misc_flags", Integer.class, "u4");
final int klassVtableStartOffset = getFieldValue("CompilerToVM::Data::Klass_vtable_start_offset", Integer.class, "int");
final int klassVtableLengthOffset = getFieldValue("CompilerToVM::Data::Klass_vtable_length_offset", Integer.class, "int");

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -46,6 +46,7 @@ public class VMDeprecatedOptions {
{"InitialRAMFraction", "64"},
{"TLABStats", "false"},
{"AllowRedefinitionToAddDeleteMethods", "true"},
{"UseNewFieldLayout", "true"},
// deprecated alias flags (see also aliased_jvm_flags):
{"DefaultMaxRAMFraction", "4"},

@ -0,0 +1,157 @@
/*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 8237767
* @summary Verify behaviour of field layout algorithm
* @library /test/lib
* @modules java.base/jdk.internal.misc
* java.management
* @run main/othervm FieldDensityTest
*/
/*
* @test
* @requires vm.bits == "64"
* @library /test/lib
* @modules java.base/jdk.internal.misc
* java.management
* @run main/othervm -XX:+UseCompressedOops -XX:+UseCompressedClassPointers FieldDensityTest
* @run main/othervm -XX:+UseCompressedOops -XX:-UseCompressedClassPointers FieldDensityTest
*/
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Comparator;
import jdk.internal.misc.Unsafe;
import jdk.test.lib.Asserts;
public class FieldDensityTest {
static int OOP_SIZE_IN_BYTES = 0;
static {
if (System.getProperty("sun.arch.data.model").equals("64")) {
if (System.getProperty("java.vm.compressedOopsMode") == null) {
OOP_SIZE_IN_BYTES = 8;
} else {
OOP_SIZE_IN_BYTES = 4;
}
} else {
OOP_SIZE_IN_BYTES = 4;
}
}
static class FieldInfo {
public Field field;
public long offset;
FieldInfo(Field field, long offset) {
this.field = field;
this.offset = offset;
}
static void checkFieldsContiguity(FieldInfo[] fieldInfo) {
Arrays.sort(fieldInfo, new SortByOffset());
for (int i = 0; i < fieldInfo.length - 1; i++) {
int size = sizeInBytesFromType(fieldInfo[i].field.getType());
Asserts.assertEquals((int)(fieldInfo[i].offset + size), (int)fieldInfo[i+1].offset,
"Empty slot between fields, should not happen");
}
}
}
static int sizeInBytesFromType(Class type) {
if (!type.isPrimitive()) {
return OOP_SIZE_IN_BYTES;
}
switch(type.getTypeName()) {
case "boolean":
case "byte": return 1;
case "char":
case "short": return 2;
case "int":
case "float": return 4;
case "long":
case "double": return 8;
default:
throw new RuntimeException("Unrecognized signature");
}
}
static class SortByOffset implements Comparator<FieldInfo> {
public int compare(FieldInfo a, FieldInfo b) {
return (int)(a.offset - b.offset);
}
}
static class E {
public byte b0;
}
static class F extends E {
public byte b1;
}
static class G extends F {
public byte b2;
}
static class H extends G {
public byte b3;
}
public static class A {
public int i;
public byte b;
public long l;
public Object o;
}
public static class B extends A {
public byte b0, b1, b2;
}
static void testFieldsContiguity(Class c) {
Unsafe unsafe = Unsafe.getUnsafe();
Field[] fields = c.getFields();
FieldInfo[] fieldsInfo = new FieldInfo[fields.length];
int i = 0;
for (Field f : fields) {
long offset = unsafe.objectFieldOffset(f);
fieldsInfo[i] = new FieldInfo(f, offset);
i++;
}
FieldInfo.checkFieldsContiguity(fieldsInfo);
}
public static void main(String[] args) {
H h = new H();
testFieldsContiguity(h.getClass());
B b = new B();
testFieldsContiguity(b.getClass());
}
}