8295475: Move non-resource allocation strategies out of ResourceObj

Reviewed-by: coleenp, stuefe, rehn, dholmes
Author: Stefan Karlsson
Date:   2022-11-10 08:31:37 +00:00
Parent: e802b124b7
Commit: bfc5816595
164 files changed, 444 insertions(+), 415 deletions(-)

View File

@ -2773,8 +2773,6 @@ public:
}
Assembler(CodeBuffer* code) : AbstractAssembler(code), _in_compressible_region(true) {}
virtual ~Assembler() {}
};
#endif // CPU_RISCV_ASSEMBLER_RISCV_HPP

View File

@ -56,7 +56,6 @@ class InterpreterMacroAssembler: public MacroAssembler {
public:
InterpreterMacroAssembler(CodeBuffer* code) : MacroAssembler(code) {}
virtual ~InterpreterMacroAssembler() {}
void load_earlyret_value(TosState state);

View File

@ -52,9 +52,6 @@ class SignatureHandlerGenerator: public NativeSignatureIterator {
public:
// Creation
SignatureHandlerGenerator(const methodHandle& method, CodeBuffer* buffer);
virtual ~SignatureHandlerGenerator() {
_masm = NULL;
}
// Code generation
void generate(uint64_t fingerprint);

View File

@ -44,8 +44,6 @@ class MacroAssembler: public Assembler {
public:
MacroAssembler(CodeBuffer* code) : Assembler(code) {}
virtual ~MacroAssembler() {}
void safepoint_poll(Label& slow_path, bool at_return, bool acquire, bool in_nmethod);
// Alignment

View File

@ -2917,10 +2917,10 @@ bool os::Linux::libnuma_init() {
set_numa_interleave_bitmask(_numa_get_interleave_mask());
set_numa_membind_bitmask(_numa_get_membind());
// Create an index -> node mapping, since nodes are not always consecutive
_nindex_to_node = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<int>(0, mtInternal);
_nindex_to_node = new (mtInternal) GrowableArray<int>(0, mtInternal);
rebuild_nindex_to_node_map();
// Create a cpu -> node mapping
_cpu_to_node = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<int>(0, mtInternal);
_cpu_to_node = new (mtInternal) GrowableArray<int>(0, mtInternal);
rebuild_cpu_to_node_map();
return true;
}

View File

@ -2019,7 +2019,7 @@ void ArchDesc::defineStateClass(FILE *fp) {
fprintf(fp,"// indexed by machine operand opcodes, pointers to the children in the label\n");
fprintf(fp,"// tree generated by the Label routines in ideal nodes (currently limited to\n");
fprintf(fp,"// two for convenience, but this could change).\n");
fprintf(fp,"class State : public ResourceObj {\n");
fprintf(fp,"class State : public ArenaObj {\n");
fprintf(fp,"private:\n");
fprintf(fp," unsigned int _cost[_LAST_MACH_OPER]; // Costs, indexed by operand opcodes\n");
fprintf(fp," uint16_t _rule[_LAST_MACH_OPER]; // Rule and validity, indexed by operand opcodes\n");

View File

@ -139,8 +139,6 @@ CodeBuffer::~CodeBuffer() {
}
NOT_PRODUCT(clear_strings());
assert(_default_oop_recorder.allocated_on_stack_or_embedded(), "should be embedded object");
}
void CodeBuffer::initialize_oop_recorder(OopRecorder* r) {

View File

@ -380,7 +380,7 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
// CodeBuffers must be allocated on the stack except for a single
// special case during expansion which is handled internally. This
// is done to guarantee proper cleanup of resources.
void* operator new(size_t size) throw() { return ResourceObj::operator new(size); }
void* operator new(size_t size) throw() { return resource_allocate_bytes(size); }
void operator delete(void* p) { ShouldNotCallThis(); }
public:

View File

@ -65,7 +65,7 @@ CFGPrinterOutput::CFGPrinterOutput(Compilation* compilation)
char file_name[O_BUFLEN];
jio_snprintf(file_name, sizeof(file_name), "output_tid" UINTX_FORMAT "_pid%u.cfg",
os::current_thread_id(), os::current_process_id());
_output = new(mtCompiler) fileStream(file_name, "at");
_output = new (mtCompiler) fileStream(file_name, "at");
}
void CFGPrinterOutput::inc_indent() {

View File

@ -2511,12 +2511,12 @@ void LinearScan::compute_oop_map(IntervalWalker* iw, const LIR_OpVisitState &vis
// Allocate them with new so they are never destroyed (otherwise, a
// forced exit could destroy these objects while they are still in
// use).
ConstantOopWriteValue* LinearScan::_oop_null_scope_value = new (ResourceObj::C_HEAP, mtCompiler) ConstantOopWriteValue(NULL);
ConstantIntValue* LinearScan::_int_m1_scope_value = new (ResourceObj::C_HEAP, mtCompiler) ConstantIntValue(-1);
ConstantIntValue* LinearScan::_int_0_scope_value = new (ResourceObj::C_HEAP, mtCompiler) ConstantIntValue((jint)0);
ConstantIntValue* LinearScan::_int_1_scope_value = new (ResourceObj::C_HEAP, mtCompiler) ConstantIntValue(1);
ConstantIntValue* LinearScan::_int_2_scope_value = new (ResourceObj::C_HEAP, mtCompiler) ConstantIntValue(2);
LocationValue* _illegal_value = new (ResourceObj::C_HEAP, mtCompiler) LocationValue(Location());
ConstantOopWriteValue* LinearScan::_oop_null_scope_value = new (mtCompiler) ConstantOopWriteValue(NULL);
ConstantIntValue* LinearScan::_int_m1_scope_value = new (mtCompiler) ConstantIntValue(-1);
ConstantIntValue* LinearScan::_int_0_scope_value = new (mtCompiler) ConstantIntValue((jint)0);
ConstantIntValue* LinearScan::_int_1_scope_value = new (mtCompiler) ConstantIntValue(1);
ConstantIntValue* LinearScan::_int_2_scope_value = new (mtCompiler) ConstantIntValue(2);
LocationValue* _illegal_value = new (mtCompiler) LocationValue(Location());
void LinearScan::init_compute_debug_info() {
// cache for frequently used scope values

View File

@ -60,7 +60,7 @@ ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K, mtClassShared) {
_total_bytes = 0;
_objs = new (ResourceObj::C_HEAP, mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
_objs = new (mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
}
ArchiveBuilder::SourceObjList::~SourceObjList() {
@ -165,9 +165,9 @@ ArchiveBuilder::ArchiveBuilder() :
_estimated_metaspaceobj_bytes(0),
_estimated_hashtable_bytes(0)
{
_klasses = new (ResourceObj::C_HEAP, mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
_symbols = new (ResourceObj::C_HEAP, mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
_special_refs = new (ResourceObj::C_HEAP, mtClassShared) GrowableArray<SpecialRefInfo>(24 * K, mtClassShared);
_klasses = new (mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
_symbols = new (mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
_special_refs = new (mtClassShared) GrowableArray<SpecialRefInfo>(24 * K, mtClassShared);
assert(_current == NULL, "must be");
_current = this;

View File

@ -226,8 +226,8 @@ private:
SourceObjList _rw_src_objs; // objs to put in rw region
SourceObjList _ro_src_objs; // objs to put in ro region
ResizeableResourceHashtable<address, SourceObjInfo, ResourceObj::C_HEAP, mtClassShared> _src_obj_table;
ResizeableResourceHashtable<address, address, ResourceObj::C_HEAP, mtClassShared> _buffered_to_src_table;
ResizeableResourceHashtable<address, SourceObjInfo, AnyObj::C_HEAP, mtClassShared> _src_obj_table;
ResizeableResourceHashtable<address, address, AnyObj::C_HEAP, mtClassShared> _buffered_to_src_table;
GrowableArray<Klass*>* _klasses;
GrowableArray<Symbol*>* _symbols;
GrowableArray<SpecialRefInfo>* _special_refs;

View File

@ -49,7 +49,7 @@ class CDSHeapVerifier : public KlassClosure {
ResourceHashtable<oop, StaticFieldInfo,
15889, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
HeapShared::oop_hash> _table;

View File

@ -73,8 +73,8 @@ ClassListParser::ClassListParser(const char* file, ParseMode parse_mode) : _id2k
vm_exit_during_initialization("Loading classlist failed", errmsg);
}
_line_no = 0;
_interfaces = new (ResourceObj::C_HEAP, mtClass) GrowableArray<int>(10, mtClass);
_indy_items = new (ResourceObj::C_HEAP, mtClass) GrowableArray<const char*>(9, mtClass);
_interfaces = new (mtClass) GrowableArray<int>(10, mtClass);
_indy_items = new (mtClass) GrowableArray<const char*>(9, mtClass);
_parse_mode = parse_mode;
// _instance should only be accessed by the thread that created _instance.

View File

@ -76,7 +76,7 @@ public:
private:
// Must be C_HEAP allocated -- we don't want nested resource allocations.
typedef ResizeableResourceHashtable<int, InstanceKlass*,
ResourceObj::C_HEAP, mtClassShared> ID2KlassTable;
AnyObj::C_HEAP, mtClassShared> ID2KlassTable;
enum {
_unspecified = -999,

View File

@ -67,7 +67,7 @@ void ClassListWriter::write(const InstanceKlass* k, const ClassFileStream* cfs)
class ClassListWriter::IDTable : public ResourceHashtable<
const InstanceKlass*, int,
15889, // prime number
ResourceObj::C_HEAP> {};
AnyObj::C_HEAP> {};
ClassListWriter::IDTable* ClassListWriter::_id_table = NULL;
int ClassListWriter::_total_ids = 0;
@ -75,7 +75,7 @@ int ClassListWriter::_total_ids = 0;
int ClassListWriter::get_id(const InstanceKlass* k) {
assert_locked();
if (_id_table == NULL) {
_id_table = new (ResourceObj::C_HEAP, mtClass)IDTable();
_id_table = new (mtClass)IDTable();
}
bool created;
int* v = _id_table->put_if_absent(k, &created);

View File

@ -57,8 +57,8 @@ void ClassPrelinker::add_one_vm_class(InstanceKlass* ik) {
void ClassPrelinker::initialize() {
assert(_vm_classes == NULL, "must be");
_vm_classes = new (ResourceObj::C_HEAP, mtClass)ClassesTable();
_processed_classes = new (ResourceObj::C_HEAP, mtClass)ClassesTable();
_vm_classes = new (mtClass)ClassesTable();
_processed_classes = new (mtClass)ClassesTable();
for (auto id : EnumRange<vmClassID>{}) {
add_one_vm_class(vmClasses::klass_at(id));
}

View File

@ -49,7 +49,7 @@ class Klass;
// at dump time, because at run time we will load a class from the CDS archive only
// if all of its supertypes are loaded from the CDS archive.
class ClassPrelinker : AllStatic {
using ClassesTable = ResourceHashtable<InstanceKlass*, bool, 15889, ResourceObj::C_HEAP, mtClassShared> ;
using ClassesTable = ResourceHashtable<InstanceKlass*, bool, 15889, AnyObj::C_HEAP, mtClassShared> ;
static ClassesTable* _processed_classes;
static ClassesTable* _vm_classes;

View File

@ -56,8 +56,8 @@ DumpTimeClassInfo::DumpTimeClassInfo(const DumpTimeClassInfo& src) {
{
int n = src.num_verifier_constraints();
if (n > 0) {
_verifier_constraints = new (ResourceObj::C_HEAP, mtClass) GrowableArray<DTVerifierConstraint>(n, mtClass);
_verifier_constraint_flags = new (ResourceObj::C_HEAP, mtClass) GrowableArray<char>(n, mtClass);
_verifier_constraints = new (mtClass) GrowableArray<DTVerifierConstraint>(n, mtClass);
_verifier_constraint_flags = new (mtClass) GrowableArray<char>(n, mtClass);
for (int i = 0; i < n; i++) {
_verifier_constraints->append(src._verifier_constraints->at(i));
_verifier_constraint_flags->append(src._verifier_constraint_flags->at(i));
@ -68,7 +68,7 @@ DumpTimeClassInfo::DumpTimeClassInfo(const DumpTimeClassInfo& src) {
{
int n = src.num_loader_constraints();
if (n > 0) {
_loader_constraints = new (ResourceObj::C_HEAP, mtClass) GrowableArray<DTLoaderConstraint>(n, mtClass);
_loader_constraints = new (mtClass) GrowableArray<DTLoaderConstraint>(n, mtClass);
for (int i = 0; i < n; i++) {
_loader_constraints->append(src._loader_constraints->at(i));
}
@ -96,10 +96,10 @@ size_t DumpTimeClassInfo::runtime_info_bytesize() const {
void DumpTimeClassInfo::add_verification_constraint(InstanceKlass* k, Symbol* name,
Symbol* from_name, bool from_field_is_protected, bool from_is_array, bool from_is_object) {
if (_verifier_constraints == NULL) {
_verifier_constraints = new (ResourceObj::C_HEAP, mtClass) GrowableArray<DTVerifierConstraint>(4, mtClass);
_verifier_constraints = new (mtClass) GrowableArray<DTVerifierConstraint>(4, mtClass);
}
if (_verifier_constraint_flags == NULL) {
_verifier_constraint_flags = new (ResourceObj::C_HEAP, mtClass) GrowableArray<char>(4, mtClass);
_verifier_constraint_flags = new (mtClass) GrowableArray<char>(4, mtClass);
}
GrowableArray<DTVerifierConstraint>* vc_array = _verifier_constraints;
for (int i = 0; i < vc_array->length(); i++) {
@ -141,7 +141,7 @@ void DumpTimeClassInfo::record_linking_constraint(Symbol* name, Handle loader1,
assert(loader1 != loader2, "sanity");
LogTarget(Info, class, loader, constraints) log;
if (_loader_constraints == NULL) {
_loader_constraints = new (ResourceObj::C_HEAP, mtClass) GrowableArray<DTLoaderConstraint>(4, mtClass);
_loader_constraints = new (mtClass) GrowableArray<DTLoaderConstraint>(4, mtClass);
}
char lt1 = get_loader_type_by(loader1());
char lt2 = get_loader_type_by(loader2());
@ -173,7 +173,7 @@ void DumpTimeClassInfo::record_linking_constraint(Symbol* name, Handle loader1,
void DumpTimeClassInfo::add_enum_klass_static_field(int archived_heap_root_index) {
if (_enum_klass_static_fields == NULL) {
_enum_klass_static_fields = new (ResourceObj::C_HEAP, mtClass) GrowableArray<int>(20, mtClass);
_enum_klass_static_fields = new (mtClass) GrowableArray<int>(20, mtClass);
}
_enum_klass_static_fields->append(archived_heap_root_index);
}

View File

@ -233,7 +233,7 @@ using DumpTimeSharedClassTableBaseType = ResourceHashtable<
InstanceKlass*,
DumpTimeClassInfo,
15889, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
&DumpTimeSharedClassTable_hash>;

View File

@ -643,7 +643,7 @@ void FileMapInfo::record_non_existent_class_path_entry(const char* path) {
Arguments::assert_is_dumping_archive();
log_info(class, path)("non-existent Class-Path entry %s", path);
if (_non_existent_class_paths == NULL) {
_non_existent_class_paths = new (ResourceObj::C_HEAP, mtClass)GrowableArray<const char*>(10, mtClass);
_non_existent_class_paths = new (mtClass) GrowableArray<const char*>(10, mtClass);
}
_non_existent_class_paths->append(os::strdup(path));
}

View File

@ -621,7 +621,7 @@ void KlassSubGraphInfo::add_subgraph_entry_field(
assert(DumpSharedSpaces, "dump time only");
if (_subgraph_entry_fields == NULL) {
_subgraph_entry_fields =
new(ResourceObj::C_HEAP, mtClass) GrowableArray<int>(10, mtClass);
new (mtClass) GrowableArray<int>(10, mtClass);
}
_subgraph_entry_fields->append(static_field_offset);
_subgraph_entry_fields->append(HeapShared::append_root(v));
@ -635,7 +635,7 @@ void KlassSubGraphInfo::add_subgraph_object_klass(Klass* orig_k) {
if (_subgraph_object_klasses == NULL) {
_subgraph_object_klasses =
new(ResourceObj::C_HEAP, mtClass) GrowableArray<Klass*>(50, mtClass);
new (mtClass) GrowableArray<Klass*>(50, mtClass);
}
assert(ArchiveBuilder::current()->is_in_buffer_space(buffered_k), "must be a shared class");
@ -1587,7 +1587,7 @@ void HeapShared::init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[],
void HeapShared::init_subgraph_entry_fields(TRAPS) {
assert(HeapShared::can_write(), "must be");
_dump_time_subgraph_info_table = new (ResourceObj::C_HEAP, mtClass)DumpTimeKlassSubGraphInfoTable();
_dump_time_subgraph_info_table = new (mtClass)DumpTimeKlassSubGraphInfoTable();
init_subgraph_entry_fields(closed_archive_subgraph_entry_fields, CHECK);
init_subgraph_entry_fields(open_archive_subgraph_entry_fields, CHECK);
if (MetaspaceShared::use_full_module_graph()) {
@ -1659,7 +1659,7 @@ bool HeapShared::is_a_test_class_in_unnamed_module(Klass* ik) {
void HeapShared::init_for_dumping(TRAPS) {
if (HeapShared::can_write()) {
setup_test_class(ArchiveHeapTestClass);
_dumped_interned_strings = new (ResourceObj::C_HEAP, mtClass)DumpedInternedStrings();
_dumped_interned_strings = new (mtClass)DumpedInternedStrings();
_native_pointers = new GrowableArrayCHeap<Metadata**, mtClassShared>(2048);
init_subgraph_entry_fields(CHECK);
}

View File

@ -183,14 +183,14 @@ private:
typedef ResourceHashtable<oop, CachedOopInfo,
36137, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
HeapShared::oop_hash> ArchivedObjectCache;
static ArchivedObjectCache* _archived_object_cache;
typedef ResourceHashtable<oop, oop,
36137, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
HeapShared::oop_hash> OriginalObjectTable;
static OriginalObjectTable* _original_object_table;
@ -198,7 +198,7 @@ private:
class DumpTimeKlassSubGraphInfoTable
: public ResourceHashtable<Klass*, KlassSubGraphInfo,
137, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
DumpTimeSharedClassTable_hash> {
public:
@ -254,7 +254,7 @@ private:
typedef ResourceHashtable<oop, bool,
15889, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
HeapShared::oop_hash> SeenObjectsTable;
@ -265,7 +265,7 @@ private:
static void init_seen_objects_table() {
assert(_seen_objects_table == NULL, "must be");
_seen_objects_table = new (ResourceObj::C_HEAP, mtClass)SeenObjectsTable();
_seen_objects_table = new (mtClass)SeenObjectsTable();
}
static void delete_seen_objects_table() {
assert(_seen_objects_table != NULL, "must be");
@ -318,10 +318,10 @@ private:
static void reset_archived_object_states(TRAPS);
static void create_archived_object_cache(bool create_orig_table) {
_archived_object_cache =
new (ResourceObj::C_HEAP, mtClass)ArchivedObjectCache();
new (mtClass)ArchivedObjectCache();
if (create_orig_table) {
_original_object_table =
new (ResourceObj::C_HEAP, mtClass)OriginalObjectTable();
new (mtClass)OriginalObjectTable();
} else {
_original_object_table = NULL;
}
@ -426,7 +426,7 @@ private:
class DumpedInternedStrings :
public ResourceHashtable<oop, bool,
15889, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
HeapShared::string_oop_hash>
{};

View File

@ -33,7 +33,7 @@ DumpTimeLambdaProxyClassInfo::DumpTimeLambdaProxyClassInfo(const DumpTimeLambdaP
_proxy_klasses = NULL;
if (src._proxy_klasses != NULL && src._proxy_klasses->length() > 0) {
int n = src._proxy_klasses->length();
_proxy_klasses = new (ResourceObj::C_HEAP, mtClassShared) GrowableArray<InstanceKlass*>(n, mtClassShared);
_proxy_klasses = new (mtClassShared) GrowableArray<InstanceKlass*>(n, mtClassShared);
for (int i = 0; i < n; i++) {
_proxy_klasses->append(src._proxy_klasses->at(i));
}

View File

@ -114,7 +114,7 @@ public:
void add_proxy_klass(InstanceKlass* proxy_klass) {
if (_proxy_klasses == NULL) {
_proxy_klasses = new (ResourceObj::C_HEAP, mtClassShared) GrowableArray<InstanceKlass*>(5, mtClassShared);
_proxy_klasses = new (mtClassShared) GrowableArray<InstanceKlass*>(5, mtClassShared);
}
assert(_proxy_klasses != NULL, "sanity");
_proxy_klasses->append(proxy_klass);
@ -160,7 +160,7 @@ class DumpTimeLambdaProxyClassDictionary
: public ResourceHashtable<LambdaProxyClassKey,
DumpTimeLambdaProxyClassInfo,
137, // prime number
ResourceObj::C_HEAP,
AnyObj::C_HEAP,
mtClassShared,
LambdaProxyClassKey::DUMPTIME_HASH,
LambdaProxyClassKey::DUMPTIME_EQUALS> {

View File

@ -156,7 +156,7 @@ class DumpClassListCLDClosure : public CLDClosure {
fileStream *_stream;
ResizeableResourceHashtable<InstanceKlass*, bool,
ResourceObj::C_HEAP, mtClassShared> _dumped_classes;
AnyObj::C_HEAP, mtClassShared> _dumped_classes;
void dump(InstanceKlass* ik) {
bool created;

View File

@ -75,7 +75,7 @@ InstanceKlass* UnregisteredClasses::load_class(Symbol* name, const char* path, T
class URLClassLoaderTable : public ResourceHashtable<
Symbol*, OopHandle,
137, // prime number
ResourceObj::C_HEAP> {};
AnyObj::C_HEAP> {};
static URLClassLoaderTable* _url_classloader_table = NULL;
@ -102,7 +102,7 @@ Handle UnregisteredClasses::create_url_classloader(Symbol* path, TRAPS) {
Handle UnregisteredClasses::get_url_classloader(Symbol* path, TRAPS) {
if (_url_classloader_table == NULL) {
_url_classloader_table = new (ResourceObj::C_HEAP, mtClass)URLClassLoaderTable();
_url_classloader_table = new (mtClass)URLClassLoaderTable();
}
OopHandle* url_classloader_ptr = _url_classloader_table->get(path);
if (url_classloader_ptr != NULL) {

View File

@ -42,7 +42,7 @@
class ciMethodBlocks;
class ciBlock;
class BCEscapeAnalyzer : public ResourceObj {
class BCEscapeAnalyzer : public ArenaObj {
private:
Arena* _arena; // ciEnv arena

View File

@ -46,7 +46,7 @@
// the distinction between `Klass*' and `Klass' are not
// reflected in the interface and instead the Klass hierarchy
// is directly modeled as the subclasses of ciKlass.
class ciBaseObject : public ResourceObj {
class ciBaseObject : public ArenaObj {
CI_PACKAGE_ACCESS
friend class ciEnv;

View File

@ -33,7 +33,7 @@
// The class caches indexed constant pool lookups.
//
// Usage note: this klass has nothing to do with ConstantPoolCache*.
class ciConstantPoolCache : public ResourceObj {
class ciConstantPoolCache : public ArenaObj {
private:
GrowableArray<int>* _keys;
GrowableArray<void*>* _elements;

View File

@ -31,7 +31,7 @@
// ciExceptionHandler
//
// This class represents an exception handler for a method.
class ciExceptionHandler : public ResourceObj {
class ciExceptionHandler : public AnyObj {
private:
friend class ciMethod;

View File

@ -36,7 +36,7 @@
// This class represents the result of a field lookup in the VM.
// The lookup may not succeed, in which case the information in
// the ciField will be incomplete.
class ciField : public ResourceObj {
class ciField : public ArenaObj {
CI_PACKAGE_ACCESS
friend class ciEnv;
friend class ciInstanceKlass;

View File

@ -32,7 +32,7 @@
class ciBlock;
class ciMethodBlocks : public ResourceObj {
class ciMethodBlocks : public ArenaObj {
private:
ciMethod *_method;
Arena *_arena;
@ -60,7 +60,7 @@ public:
#endif
};
class ciBlock : public ResourceObj {
class ciBlock : public ArenaObj {
private:
int _idx;
int _start_bci;

View File

@ -611,6 +611,10 @@ uint ciMethodData::arg_modified(int arg) const {
return aid->arg_modified(arg);
}
ciParametersTypeData* ciMethodData::parameters_type_data() const {
return _parameters != NULL ? new ciParametersTypeData(_parameters) : NULL;
}
ByteSize ciMethodData::offset_of_slot(ciProfileData* data, ByteSize slot_offset_in_data) {
// Get offset within MethodData* of the data array
ByteSize data_offset = MethodData::data_offset();

View File

@ -565,9 +565,7 @@ public:
bool is_arg_returned(int i) const;
uint arg_modified(int arg) const;
ciParametersTypeData* parameters_type_data() const {
return _parameters != NULL ? new ciParametersTypeData(_parameters) : NULL;
}
ciParametersTypeData* parameters_type_data() const;
// Code generation helper
ByteSize offset_of_slot(ciProfileData* data, ByteSize slot_offset_in_data);

View File

@ -36,7 +36,7 @@
// of ciObject and its subclasses. It contains a caching mechanism
// which ensures that for each oop, at most one ciObject is created.
// This invariant allows efficient implementation of ciObject.
class ciObjectFactory : public ResourceObj {
class ciObjectFactory : public ArenaObj {
friend class VMStructs;
friend class ciEnv;
@ -56,7 +56,7 @@ private:
int _next_ident;
public:
struct NonPermObject : public ResourceObj {
struct NonPermObject : public ArenaObj {
ciObject* _object;
NonPermObject* _next;

View File

@ -34,7 +34,7 @@
// ciSignature
//
// This class represents the signature of a method.
class ciSignature : public ResourceObj {
class ciSignature : public ArenaObj {
private:
ciSymbol* _symbol;
ciKlass* _accessing_klass;

View File

@ -3112,6 +3112,8 @@ void ciTypeFlow::record_failure(const char* reason) {
}
#ifndef PRODUCT
void ciTypeFlow::print() const { print_on(tty); }
// ------------------------------------------------------------------
// ciTypeFlow::print_on
void ciTypeFlow::print_on(outputStream* st) const {

View File

@ -32,7 +32,7 @@
#endif
class ciTypeFlow : public ResourceObj {
class ciTypeFlow : public ArenaObj {
private:
ciEnv* _env;
ciMethod* _method;
@ -66,7 +66,7 @@ public:
// Represents information about an "active" jsr call. This
// class represents a call to the routine at some entry address
// with some distinct return address.
class JsrRecord : public ResourceObj {
class JsrRecord : public ArenaObj {
private:
int _entry_address;
int _return_address;
@ -97,7 +97,7 @@ public:
//
// Note that different amounts of effort can be expended determining
// if paths are compatible. <DISCUSSION>
class JsrSet : public ResourceObj {
class JsrSet : public AnyObj {
private:
GrowableArray<JsrRecord*> _set;
@ -153,7 +153,7 @@ public:
// A StateVector summarizes the type information at some
// point in the program
class StateVector : public ResourceObj {
class StateVector : public AnyObj {
private:
ciType** _types;
int _stack_size;
@ -513,7 +513,7 @@ public:
};
// A basic block
class Block : public ResourceObj {
class Block : public ArenaObj {
private:
ciBlock* _ciblock;
GrowableArray<Block*>* _exceptions;
@ -707,7 +707,7 @@ public:
};
// Loop
class Loop : public ResourceObj {
class Loop : public ArenaObj {
private:
Loop* _parent;
Loop* _sibling; // List of siblings, null terminated
@ -926,6 +926,7 @@ public:
// Determine if bci is dominated by dom_bci
bool is_dominated_by(int bci, int dom_bci);
void print() const PRODUCT_RETURN;
void print_on(outputStream* st) const PRODUCT_RETURN;
void rpo_print_on(outputStream* st) const PRODUCT_RETURN;

View File

@ -125,7 +125,7 @@ class BytecodeCPEntry {
class BytecodeConstantPool : ResourceObj {
private:
typedef ResourceHashtable<BytecodeCPEntry, u2,
256, ResourceObj::RESOURCE_AREA, mtInternal,
256, AnyObj::RESOURCE_AREA, mtInternal,
&BytecodeCPEntry::hash, &BytecodeCPEntry::equals> IndexHash;
ConstantPool* _orig;

View File

@ -165,7 +165,6 @@ void ClassFileParser::parse_constant_pool_entries(const ClassFileStream* const s
const ClassFileStream cfs1 = *stream;
const ClassFileStream* const cfs = &cfs1;
assert(cfs->allocated_on_stack_or_embedded(), "should be local");
debug_only(const u1* const old_current = stream->current();)
// Used for batching symbol allocations.
@ -2137,7 +2136,7 @@ void ClassFileParser::copy_localvariable_table(const ConstMethod* cm,
ResourceMark rm(THREAD);
typedef ResourceHashtable<LocalVariableTableElement, LocalVariableTableElement*,
256, ResourceObj::RESOURCE_AREA, mtInternal,
256, AnyObj::RESOURCE_AREA, mtInternal,
&LVT_Hash::hash, &LVT_Hash::equals> LVT_HashTable;
LVT_HashTable* const table = new LVT_HashTable();

View File

@ -571,7 +571,7 @@ void ClassLoader::setup_patch_mod_entries() {
int num_of_entries = patch_mod_args->length();
// Set up the boot loader's _patch_mod_entries list
_patch_mod_entries = new (ResourceObj::C_HEAP, mtModule) GrowableArray<ModuleClassPathList*>(num_of_entries, mtModule);
_patch_mod_entries = new (mtModule) GrowableArray<ModuleClassPathList*>(num_of_entries, mtModule);
for (int i = 0; i < num_of_entries; i++) {
const char* module_name = (patch_mod_args->at(i))->module_name();
@ -1508,7 +1508,7 @@ void ClassLoader::classLoader_init2(JavaThread* current) {
// done before loading any classes, by the same thread that will
// subsequently do the first class load. So, no lock is needed for this.
assert(_exploded_entries == NULL, "Should only get initialized once");
_exploded_entries = new (ResourceObj::C_HEAP, mtModule)
_exploded_entries = new (mtModule)
GrowableArray<ModuleClassPathList*>(EXPLODED_ENTRY_SIZE, mtModule);
add_to_exploded_build_list(current, vmSymbols::java_base());
}

View File

@ -821,7 +821,7 @@ void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
if (!m->is_shared()) {
MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
if (_deallocate_list == NULL) {
_deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, mtClass);
_deallocate_list = new (mtClass) GrowableArray<Metadata*>(100, mtClass);
}
_deallocate_list->append_if_missing(m);
log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());

View File

@ -112,7 +112,7 @@ protected:
}
typedef ResourceHashtable<oop, ClassLoaderStats,
256, ResourceObj::RESOURCE_AREA, mtInternal,
256, AnyObj::RESOURCE_AREA, mtInternal,
ClassLoaderStatsClosure::oop_hash> StatsTable;
outputStream* _out;

View File

@ -51,7 +51,7 @@ CompactHashtableWriter::CompactHashtableWriter(int num_entries,
_num_entries_written = 0;
_buckets = NEW_C_HEAP_ARRAY(GrowableArray<Entry>*, _num_buckets, mtSymbol);
for (int i=0; i<_num_buckets; i++) {
_buckets[i] = new (ResourceObj::C_HEAP, mtSymbol) GrowableArray<Entry>(0, mtSymbol);
_buckets[i] = new (mtSymbol) GrowableArray<Entry>(0, mtSymbol);
}
_stats = stats;

View File

@ -84,7 +84,7 @@ void FieldGroup::add_primitive_field(AllFieldStream fs, BasicType type) {
int size = type2aelembytes(type);
LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
if (_primitive_fields == NULL) {
_primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
}
_primitive_fields->append(block);
}
@ -93,7 +93,7 @@ void FieldGroup::add_oop_field(AllFieldStream fs) {
int size = type2aelembytes(T_OBJECT);
LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
if (_oop_fields == NULL) {
_oop_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
_oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
}
_oop_fields->append(block);
_oop_count++;

View File

@ -965,11 +965,11 @@ void java_lang_Class::set_mirror_module_field(JavaThread* current, Klass* k, Han
// Statically allocate fixup lists because they always get created.
void java_lang_Class::allocate_fixup_lists() {
GrowableArray<Klass*>* mirror_list =
new (ResourceObj::C_HEAP, mtClass) GrowableArray<Klass*>(40, mtClass);
new (mtClass) GrowableArray<Klass*>(40, mtClass);
set_fixup_mirror_list(mirror_list);
GrowableArray<Klass*>* module_list =
new (ResourceObj::C_HEAP, mtModule) GrowableArray<Klass*>(500, mtModule);
new (mtModule) GrowableArray<Klass*>(500, mtModule);
set_fixup_module_field_list(module_list);
}
@ -1920,8 +1920,8 @@ oop java_lang_Thread::async_get_stack_trace(oop java_thread, TRAPS) {
// Pick minimum length that will cover most cases
int init_length = 64;
_methods = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<Method*>(init_length, mtInternal);
_bcis = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<int>(init_length, mtInternal);
_methods = new (mtInternal) GrowableArray<Method*>(init_length, mtInternal);
_bcis = new (mtInternal) GrowableArray<int>(init_length, mtInternal);
int total_count = 0;
for (vframeStream vfst(thread, false, false, carrier); // we don't process frames as we don't care about oops

View File

@ -49,7 +49,7 @@ class LoaderConstraint : public CHeapObj<mtClass> {
public:
LoaderConstraint(InstanceKlass* klass, oop class_loader1, oop class_loader2) :
_klass(klass) {
_loaders = new (ResourceObj::C_HEAP, mtClass) GrowableArray<ClassLoaderData*>(10, mtClass);
_loaders = new (mtClass) GrowableArray<ClassLoaderData*>(10, mtClass);
add_loader(class_loader1);
add_loader(class_loader2);
}
@ -89,7 +89,7 @@ class ConstraintSet { // copied into hashtable as
ConstraintSet& operator=(const ConstraintSet&) = delete;
void initialize(LoaderConstraint* constraint) {
_constraints = new (ResourceObj::C_HEAP, mtClass) GrowableArray<LoaderConstraint*>(5, mtClass);
_constraints = new (mtClass) GrowableArray<LoaderConstraint*>(5, mtClass);
_constraints->push(constraint);
}
@ -111,7 +111,7 @@ class ConstraintSet { // copied into hashtable as
};
ResourceHashtable<SymbolHandle, ConstraintSet, 107, ResourceObj::C_HEAP, mtClass, SymbolHandle::compute_hash> _loader_constraint_table;
ResourceHashtable<SymbolHandle, ConstraintSet, 107, AnyObj::C_HEAP, mtClass, SymbolHandle::compute_hash> _loader_constraint_table;
void LoaderConstraint::extend_loader_constraint(Symbol* class_name,
Handle loader,

View File

@ -163,7 +163,7 @@ void ModuleEntry::add_read(ModuleEntry* m) {
} else {
if (_reads == NULL) {
// Lazily create a module's reads list
_reads = new (ResourceObj::C_HEAP, mtModule) GrowableArray<ModuleEntry*>(MODULE_READS_SIZE, mtModule);
_reads = new (mtModule) GrowableArray<ModuleEntry*>(MODULE_READS_SIZE, mtModule);
}
// Determine, based on this newly established read edge to module m,
@ -383,7 +383,7 @@ typedef ResourceHashtable<
const ModuleEntry*,
ModuleEntry*,
557, // prime number
ResourceObj::C_HEAP> ArchivedModuleEntries;
AnyObj::C_HEAP> ArchivedModuleEntries;
static ArchivedModuleEntries* _archive_modules_entries = NULL;
ModuleEntry* ModuleEntry::allocate_archived_entry() const {
@ -392,7 +392,7 @@ ModuleEntry* ModuleEntry::allocate_archived_entry() const {
memcpy((void*)archived_entry, (void*)this, sizeof(ModuleEntry));
if (_archive_modules_entries == NULL) {
_archive_modules_entries = new (ResourceObj::C_HEAP, mtClass)ArchivedModuleEntries();
_archive_modules_entries = new (mtClass)ArchivedModuleEntries();
}
assert(_archive_modules_entries->get(this) == NULL, "Each ModuleEntry must not be shared across ModuleEntryTables");
_archive_modules_entries->put(this, archived_entry);
@ -428,7 +428,7 @@ GrowableArray<ModuleEntry*>* ModuleEntry::restore_growable_array(Array<ModuleEnt
GrowableArray<ModuleEntry*>* array = NULL;
int length = (archived_array == NULL) ? 0 : archived_array->length();
if (length > 0) {
array = new (ResourceObj::C_HEAP, mtModule)GrowableArray<ModuleEntry*>(length, mtModule);
array = new (mtModule) GrowableArray<ModuleEntry*>(length, mtModule);
for (int i = 0; i < length; i++) {
ModuleEntry* archived_entry = archived_array->at(i);
array->append(archived_entry);

View File

@ -206,7 +206,7 @@ class ModuleClosure: public StackObj {
class ModuleEntryTable : public CHeapObj<mtModule> {
private:
static ModuleEntry* _javabase_module;
ResourceHashtable<SymbolHandle, ModuleEntry*, 109, ResourceObj::C_HEAP, mtModule,
ResourceHashtable<SymbolHandle, ModuleEntry*, 109, AnyObj::C_HEAP, mtModule,
SymbolHandle::compute_hash> _table;
public:

View File

@ -80,7 +80,7 @@ void PackageEntry::add_qexport(ModuleEntry* m) {
if (!has_qual_exports_list()) {
// Lazily create a package's qualified exports list.
// Initial size is small, do not anticipate export lists to be large.
_qualified_exports = new (ResourceObj::C_HEAP, mtModule) GrowableArray<ModuleEntry*>(QUAL_EXP_SIZE, mtModule);
_qualified_exports = new (mtModule) GrowableArray<ModuleEntry*>(QUAL_EXP_SIZE, mtModule);
}
// Determine, based on this newly established export to module m,
@ -215,7 +215,7 @@ typedef ResourceHashtable<
const PackageEntry*,
PackageEntry*,
557, // prime number
ResourceObj::C_HEAP> ArchivedPackageEntries;
AnyObj::C_HEAP> ArchivedPackageEntries;
static ArchivedPackageEntries* _archived_packages_entries = NULL;
PackageEntry* PackageEntry::allocate_archived_entry() const {
@ -224,7 +224,7 @@ PackageEntry* PackageEntry::allocate_archived_entry() const {
memcpy((void*)archived_entry, (void*)this, sizeof(PackageEntry));
if (_archived_packages_entries == NULL) {
_archived_packages_entries = new (ResourceObj::C_HEAP, mtClass)ArchivedPackageEntries();
_archived_packages_entries = new (mtClass)ArchivedPackageEntries();
}
assert(_archived_packages_entries->get(this) == NULL, "Each PackageEntry must not be shared across PackageEntryTables");
_archived_packages_entries->put(this, archived_entry);

View File

@ -237,7 +237,7 @@ public:
// The PackageEntryTable is a Hashtable containing a list of all packages defined
// by a particular class loader. Each package is represented as a PackageEntry node.
class PackageEntryTable : public CHeapObj<mtModule> {
ResourceHashtable<SymbolHandle, PackageEntry*, 109, ResourceObj::C_HEAP, mtModule,
ResourceHashtable<SymbolHandle, PackageEntry*, 109, AnyObj::C_HEAP, mtModule,
SymbolHandle::compute_hash> _table;
public:
PackageEntryTable();

View File

@ -50,7 +50,7 @@ class PlaceholderKey {
};
const int _placeholder_table_size = 503; // Does this really have to be prime?
ResourceHashtable<PlaceholderKey, PlaceholderEntry, _placeholder_table_size, ResourceObj::C_HEAP, mtClass,
ResourceHashtable<PlaceholderKey, PlaceholderEntry, _placeholder_table_size, AnyObj::C_HEAP, mtClass,
PlaceholderKey::hash, PlaceholderKey::equals> _placeholders;
// SeenThread objects represent list of threads that are

View File

@ -51,7 +51,7 @@ bool ProtectionDomainCacheTable::equals(const WeakHandle& protection_domain1, co
// WeakHandle is both the key and the value. We need it as the key to compare the oops that each point to
// for equality. We need it as the value to return the one that already exists to link in the DictionaryEntry.
ResourceHashtable<WeakHandle, WeakHandle, 1009, ResourceObj::C_HEAP, mtClass,
ResourceHashtable<WeakHandle, WeakHandle, 1009, AnyObj::C_HEAP, mtClass,
ProtectionDomainCacheTable::compute_hash,
ProtectionDomainCacheTable::equals> _pd_cache_table;
@ -116,7 +116,7 @@ void ProtectionDomainCacheTable::unlink() {
// Create a list for holding deleted entries
if (_delete_list == NULL) {
_delete_list = new (ResourceObj::C_HEAP, mtClass)
_delete_list = new (mtClass)
GrowableArray<ProtectionDomainEntry*>(20, mtClass);
}

View File

@ -33,7 +33,7 @@
#include "runtime/mutexLocker.hpp"
#include "utilities/resourceHash.hpp"
ResourceHashtable<uintptr_t, ResolutionErrorEntry*, 107, ResourceObj::C_HEAP, mtClass> _resolution_error_table;
ResourceHashtable<uintptr_t, ResolutionErrorEntry*, 107, AnyObj::C_HEAP, mtClass> _resolution_error_table;
// create new error entry
void ResolutionErrorTable::add_entry(const constantPoolHandle& pool, int cp_index,
@ -162,4 +162,3 @@ void ResolutionErrorTable::purge_resolution_errors() {
ResolutionIteratePurgeErrors purgeErrorsIterator;
_resolution_error_table.unlink(&purgeErrorsIterator);
}

View File

@ -617,7 +617,7 @@ class VerifyCompStrings : StackObj {
}
ResizeableResourceHashtable<oop, bool,
ResourceObj::C_HEAP, mtInternal,
AnyObj::C_HEAP, mtInternal,
string_hash, string_equals> _table;
public:
size_t _errors;

View File

@ -113,9 +113,9 @@ class InvokeMethodKey : public StackObj {
};
ResourceHashtable<InvokeMethodKey, Method*, 139, ResourceObj::C_HEAP, mtClass,
ResourceHashtable<InvokeMethodKey, Method*, 139, AnyObj::C_HEAP, mtClass,
InvokeMethodKey::compute_hash, InvokeMethodKey::key_comparison> _invoke_method_intrinsic_table;
ResourceHashtable<SymbolHandle, OopHandle, 139, ResourceObj::C_HEAP, mtClass, SymbolHandle::compute_hash> _invoke_method_type_table;
ResourceHashtable<SymbolHandle, OopHandle, 139, AnyObj::C_HEAP, mtClass, SymbolHandle::compute_hash> _invoke_method_type_table;
OopHandle SystemDictionary::_java_system_loader;
OopHandle SystemDictionary::_java_platform_loader;

View File

@ -428,7 +428,7 @@ InstanceKlass* SystemDictionaryShared::find_or_load_shared_class(
class UnregisteredClassesTable : public ResourceHashtable<
Symbol*, InstanceKlass*,
15889, // prime number
ResourceObj::C_HEAP> {};
AnyObj::C_HEAP> {};
static UnregisteredClassesTable* _unregistered_classes_table = NULL;
@ -441,7 +441,7 @@ bool SystemDictionaryShared::add_unregistered_class(Thread* current, InstanceKla
MutexLocker ml(current, UnregisteredClassesTable_lock);
Symbol* name = klass->name();
if (_unregistered_classes_table == NULL) {
_unregistered_classes_table = new (ResourceObj::C_HEAP, mtClass)UnregisteredClassesTable();
_unregistered_classes_table = new (mtClass)UnregisteredClassesTable();
}
bool created;
InstanceKlass** v = _unregistered_classes_table->put_if_absent(name, klass, &created);
@ -500,9 +500,9 @@ void SystemDictionaryShared::set_shared_class_misc_info(InstanceKlass* k, ClassF
void SystemDictionaryShared::initialize() {
if (Arguments::is_dumping_archive()) {
_dumptime_table = new (ResourceObj::C_HEAP, mtClass) DumpTimeSharedClassTable;
_dumptime_table = new (mtClass) DumpTimeSharedClassTable;
_dumptime_lambda_proxy_class_dictionary =
new (ResourceObj::C_HEAP, mtClass) DumpTimeLambdaProxyClassDictionary;
new (mtClass) DumpTimeLambdaProxyClassDictionary;
}
}
@ -1497,7 +1497,7 @@ void SystemDictionaryShared::clone_dumptime_tables() {
assert_lock_strong(DumpTimeTable_lock);
assert(_cloned_dumptime_table == NULL, "_cloned_dumptime_table must be cleaned");
_cloned_dumptime_table = new (ResourceObj::C_HEAP, mtClass) DumpTimeSharedClassTable;
_cloned_dumptime_table = new (mtClass) DumpTimeSharedClassTable;
CloneDumpTimeClassTable copy_classes(_dumptime_table, _cloned_dumptime_table);
_dumptime_table->iterate_all_live_classes(&copy_classes);
_cloned_dumptime_table->update_counts();
@ -1505,7 +1505,7 @@ void SystemDictionaryShared::clone_dumptime_tables() {
assert(_cloned_dumptime_lambda_proxy_class_dictionary == NULL,
"_cloned_dumptime_lambda_proxy_class_dictionary must be cleaned");
_cloned_dumptime_lambda_proxy_class_dictionary =
new (ResourceObj::C_HEAP, mtClass) DumpTimeLambdaProxyClassDictionary;
new (mtClass) DumpTimeLambdaProxyClassDictionary;
CloneDumpTimeLambdaProxyClassTable copy_proxy_classes(_dumptime_lambda_proxy_class_dictionary,
_cloned_dumptime_lambda_proxy_class_dictionary);
_dumptime_lambda_proxy_class_dictionary->iterate(&copy_proxy_classes);

View File

@ -171,10 +171,10 @@ int CodeCache::_number_of_nmethods_with_dependencies = 0;
ExceptionCache* volatile CodeCache::_exception_cache_purge_list = NULL;
// Initialize arrays of CodeHeap subsets
GrowableArray<CodeHeap*>* CodeCache::_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
GrowableArray<CodeHeap*>* CodeCache::_compiled_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
GrowableArray<CodeHeap*>* CodeCache::_nmethod_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
GrowableArray<CodeHeap*>* CodeCache::_allocable_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
GrowableArray<CodeHeap*>* CodeCache::_heaps = new(mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
GrowableArray<CodeHeap*>* CodeCache::_compiled_heaps = new(mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
GrowableArray<CodeHeap*>* CodeCache::_nmethod_heaps = new(mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
GrowableArray<CodeHeap*>* CodeCache::_allocable_heaps = new(mtCode) GrowableArray<CodeHeap*> (static_cast<int>(CodeBlobType::All), mtCode);
void CodeCache::check_heap_sizes(size_t non_nmethod_size, size_t profiled_size, size_t non_profiled_size, size_t cache_size, bool all_set) {
size_t total_size = non_nmethod_size + profiled_size + non_profiled_size;
@ -1277,7 +1277,7 @@ static GrowableArray<CompiledMethod*>* old_compiled_method_table = NULL;
static void add_to_old_table(CompiledMethod* c) {
if (old_compiled_method_table == NULL) {
old_compiled_method_table = new (ResourceObj::C_HEAP, mtCode) GrowableArray<CompiledMethod*>(100, mtCode);
old_compiled_method_table = new (mtCode) GrowableArray<CompiledMethod*>(100, mtCode);
}
old_compiled_method_table->push(c);
}

View File

@ -45,7 +45,7 @@ class ConstantOopWriteValue;
class LocationValue;
class ObjectValue;
class ScopeValue: public ResourceObj {
class ScopeValue: public AnyObj {
public:
// Testers
virtual bool is_location() const { return false; }

View File

@ -2093,7 +2093,7 @@ void nmethod::check_all_dependencies(DepChange& changes) {
NOT_PRODUCT( FlagSetting fs(TraceDependencies, false) );
typedef ResourceHashtable<DependencySignature, int, 11027,
ResourceObj::RESOURCE_AREA, mtInternal,
AnyObj::RESOURCE_AREA, mtInternal,
&DependencySignature::hash,
&DependencySignature::equals> DepTable;

View File

@ -203,3 +203,11 @@ int ObjectLookup::find_index(jobject handle, OopRecorder* oop_recorder) {
}
return _values.at(location).index();
}
OopRecorder::OopRecorder(Arena* arena, bool deduplicate): _oops(arena), _metadata(arena) {
if (deduplicate) {
_object_lookup = new ObjectLookup();
} else {
_object_lookup = NULL;
}
}

View File

@ -91,7 +91,7 @@ template <class T> class ValueRecorder : public StackObj {
int maybe_find_index(T h);
// leaky hash table of handle => index, to help detect duplicate insertion
template <class X> class IndexCache : public ResourceObj {
template <class X> class IndexCache : public ArenaObj {
// This class is only used by the ValueRecorder class.
friend class ValueRecorder;
enum {
@ -181,13 +181,7 @@ class OopRecorder : public ResourceObj {
ValueRecorder<Metadata*> _metadata;
ObjectLookup* _object_lookup;
public:
OopRecorder(Arena* arena = NULL, bool deduplicate = false): _oops(arena), _metadata(arena) {
if (deduplicate) {
_object_lookup = new ObjectLookup();
} else {
_object_lookup = NULL;
}
}
OopRecorder(Arena* arena = NULL, bool deduplicate = false);
int allocate_oop_index(jobject h) {
return _oops.allocate_index(h);

View File

@ -90,7 +90,7 @@ int CompilerEvent::PhaseEvent::get_phase_id(const char* phase_name, bool may_exi
{
PhaseTypeGuard guard(sync);
if (phase_names == NULL) {
phase_names = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<const char*>(100, mtCompiler);
phase_names = new (mtInternal) GrowableArray<const char*>(100, mtCompiler);
register_jfr_serializer = true;
} else if (may_exist) {
index = lookup_phase(phase_name);

View File

@ -192,7 +192,7 @@ class decode_env {
typedef ResourceHashtable<
address, SourceFileInfo,
15889, // prime number
ResourceObj::C_HEAP> SourceFileInfoTable;
AnyObj::C_HEAP> SourceFileInfoTable;
static SourceFileInfoTable* _src_table;
static const char* _cached_src;
@ -200,7 +200,7 @@ class decode_env {
static SourceFileInfoTable& src_table() {
if (_src_table == NULL) {
_src_table = new (ResourceObj::C_HEAP, mtCode)SourceFileInfoTable();
_src_table = new (mtCode)SourceFileInfoTable();
}
return *_src_table;
}
@ -265,7 +265,7 @@ void decode_env::print_hook_comments(address pc, bool newline) {
}
_cached_src_lines->clear();
} else {
_cached_src_lines = new (ResourceObj::C_HEAP, mtCode)GrowableArray<const char*>(0, mtCode);
_cached_src_lines = new (mtCode) GrowableArray<const char*>(0, mtCode);
}
if ((fp = os::fopen(file, "r")) == NULL) {

View File

@ -49,11 +49,11 @@ class MethodLivenessResult : public ResourceBitMap {
bool is_valid() const { return _is_valid; }
};
class MethodLiveness : public ResourceObj {
class MethodLiveness : public ArenaObj {
public:
// The BasicBlock class is used to represent a basic block in the
// liveness analysis.
class BasicBlock : public ResourceObj {
class BasicBlock : public ArenaObj {
private:
// This class is only used by the MethodLiveness class.
friend class MethodLiveness;

View File

@ -249,7 +249,7 @@ protected:
HeapRegion* _allocation_region;
// Regions allocated for the current archive range.
GrowableArray<HeapRegion*> _allocated_regions;
GrowableArrayCHeap<HeapRegion*, mtGC> _allocated_regions;
// Current allocation window within the current region.
HeapWord* _bottom;
@ -265,9 +265,7 @@ public:
_open(open),
_g1h(g1h),
_allocation_region(NULL),
_allocated_regions((ResourceObj::set_allocation_type((address) &_allocated_regions,
ResourceObj::C_HEAP),
2), mtGC),
_allocated_regions(2),
_bottom(NULL),
_top(NULL),
_max(NULL) { }

View File

@ -1458,8 +1458,8 @@ G1CollectedHeap::G1CollectedHeap() :
_old_marking_cycles_completed(0),
_eden(),
_survivor(),
_gc_timer_stw(new (ResourceObj::C_HEAP, mtGC) STWGCTimer()),
_gc_tracer_stw(new (ResourceObj::C_HEAP, mtGC) G1NewTracer()),
_gc_timer_stw(new STWGCTimer()),
_gc_tracer_stw(new G1NewTracer()),
_policy(new G1Policy(_gc_timer_stw)),
_heap_sizing_policy(NULL),
_collection_set(this, _policy),

View File

@ -391,8 +391,8 @@ G1ConcurrentMark::G1ConcurrentMark(G1CollectedHeap* g1h,
_concurrent(false),
_has_aborted(false),
_restart_for_overflow(false),
_gc_timer_cm(new (ResourceObj::C_HEAP, mtGC) ConcurrentGCTimer()),
_gc_tracer_cm(new (ResourceObj::C_HEAP, mtGC) G1OldTracer()),
_gc_timer_cm(new ConcurrentGCTimer()),
_gc_tracer_cm(new G1OldTracer()),
// _verbose_level set below

View File

@ -33,7 +33,7 @@ G1FullGCCompactionPoint::G1FullGCCompactionPoint(G1FullCollector* collector) :
_collector(collector),
_current_region(nullptr),
_compaction_top(nullptr) {
_compaction_regions = new (ResourceObj::C_HEAP, mtGC) GrowableArray<HeapRegion*>(32, mtGC);
_compaction_regions = new (mtGC) GrowableArray<HeapRegion*>(32, mtGC);
_compaction_region_iterator = _compaction_regions->begin();
}

View File

@ -29,7 +29,7 @@
#include "utilities/debug.hpp"
G1SurvivorRegions::G1SurvivorRegions() :
_regions(new (ResourceObj::C_HEAP, mtGC) GrowableArray<HeapRegion*>(8, mtGC)),
_regions(new (mtGC) GrowableArray<HeapRegion*>(8, mtGC)),
_used_bytes(0),
_regions_on_node() {}

View File

@ -32,7 +32,7 @@ class G1EvacInfo;
class G1HeapSummary;
class G1EvacSummary;
class G1NewTracer : public YoungGCTracer {
class G1NewTracer : public YoungGCTracer, public CHeapObj<mtGC> {
G1GCPauseType _pause;
public:
@ -86,7 +86,7 @@ private:
bool prediction_active);
};
class G1OldTracer : public OldGCTracer {
class G1OldTracer : public OldGCTracer, public CHeapObj<mtGC> {
protected:
void report_gc_start_impl(GCCause::Cause cause, const Ticks& timestamp);
public:

View File

@ -39,7 +39,7 @@
#include "utilities/align.hpp"
MutableNUMASpace::MutableNUMASpace(size_t alignment) : MutableSpace(alignment), _must_use_large_pages(false) {
_lgrp_spaces = new (ResourceObj::C_HEAP, mtGC) GrowableArray<LGRPSpace*>(0, mtGC);
_lgrp_spaces = new (mtGC) GrowableArray<LGRPSpace*>(0, mtGC);
_page_size = os::vm_page_size();
_adaptation_cycles = 0;
_samples_count = 0;

View File

@ -60,7 +60,7 @@ ParCompactionManager::ParCompactionManager() {
reset_bitmap_query_cache();
_deferred_obj_array = new (ResourceObj::C_HEAP, mtGC) GrowableArray<HeapWord*>(10, mtGC);
_deferred_obj_array = new (mtGC) GrowableArray<HeapWord*>(10, mtGC);
}
void ParCompactionManager::initialize(ParMarkBitMap* mbm) {
@ -89,7 +89,7 @@ void ParCompactionManager::initialize(ParMarkBitMap* mbm) {
assert(ParallelScavengeHeap::heap()->workers().max_workers() != 0,
"Not initialized?");
_shadow_region_array = new (ResourceObj::C_HEAP, mtGC) GrowableArray<size_t >(10, mtGC);
_shadow_region_array = new (mtGC) GrowableArray<size_t >(10, mtGC);
_shadow_region_monitor = new Monitor(Mutex::nosafepoint, "CompactionManager_lock");
}

View File

@ -185,9 +185,9 @@ DefNewGeneration::DefNewGeneration(ReservedSpace rs,
_tenuring_threshold = MaxTenuringThreshold;
_pretenure_size_threshold_words = PretenureSizeThreshold >> LogHeapWordSize;
_gc_timer = new (ResourceObj::C_HEAP, mtGC) STWGCTimer();
_gc_timer = new STWGCTimer();
_gc_tracer = new (ResourceObj::C_HEAP, mtGC) DefNewTracer();
_gc_tracer = new DefNewTracer();
}
void DefNewGeneration::compute_space_boundaries(uintx minimum_eden_size,

View File

@ -214,7 +214,7 @@ void MarkSweep::KeepAliveClosure::do_oop(oop* p) { MarkSweep::KeepAliveClo
void MarkSweep::KeepAliveClosure::do_oop(narrowOop* p) { MarkSweep::KeepAliveClosure::do_oop_work(p); }
void MarkSweep::initialize() {
MarkSweep::_gc_timer = new (ResourceObj::C_HEAP, mtGC) STWGCTimer();
MarkSweep::_gc_tracer = new (ResourceObj::C_HEAP, mtGC) SerialOldTracer();
MarkSweep::_gc_timer = new STWGCTimer();
MarkSweep::_gc_tracer = new SerialOldTracer();
MarkSweep::_string_dedup_requests = new StringDedup::Requests();
}

View File

@ -114,7 +114,7 @@ GCPhase::PhaseType TimePartitions::current_phase_type() const {
}
TimePartitions::TimePartitions() {
_phases = new (ResourceObj::C_HEAP, mtGC) GrowableArray<GCPhase>(INITIAL_CAPACITY, mtGC);
_phases = new (mtGC) GrowableArray<GCPhase>(INITIAL_CAPACITY, mtGC);
clear();
}

View File

@ -136,7 +136,7 @@ class PhasesIterator {
virtual GCPhase* next() = 0;
};
class GCTimer : public ResourceObj {
class GCTimer {
friend class GCTimerTest;
protected:
Ticks _gc_start;
@ -159,13 +159,13 @@ class GCTimer : public ResourceObj {
TimePartitions* time_partitions() { return &_time_partitions; }
};
class STWGCTimer : public GCTimer {
class STWGCTimer : public GCTimer, public CHeapObj<mtGC> {
public:
virtual void register_gc_start(const Ticks& time = Ticks::now());
virtual void register_gc_end(const Ticks& time = Ticks::now());
};
class ConcurrentGCTimer : public GCTimer {
class ConcurrentGCTimer : public GCTimer, public CHeapObj<mtGC> {
public:
void register_gc_concurrent_start(const char* name, const Ticks& time = Ticks::now());
void register_gc_concurrent_end(const Ticks& time = Ticks::now());

View File

@ -91,7 +91,7 @@ class ParallelOldGCInfo {
void* dense_prefix() const { return _dense_prefix; }
};
class GCTracer : public ResourceObj {
class GCTracer {
protected:
SharedGCInfo _shared_gc_info;
@ -196,7 +196,7 @@ class ParallelOldTracer : public OldGCTracer {
void send_parallel_old_event() const;
};
class SerialOldTracer : public OldGCTracer {
class SerialOldTracer : public OldGCTracer, public CHeapObj<mtGC> {
public:
SerialOldTracer() : OldGCTracer(SerialOld) {}
};
@ -206,7 +206,7 @@ class ParallelScavengeTracer : public YoungGCTracer {
ParallelScavengeTracer() : YoungGCTracer(ParallelScavenge) {}
};
class DefNewTracer : public YoungGCTracer {
class DefNewTracer : public YoungGCTracer, public CHeapObj<mtGC> {
public:
DefNewTracer() : YoungGCTracer(DefNew) {}
};

View File

@ -29,7 +29,7 @@
#include "gc/shenandoah/c2/shenandoahSupport.hpp"
#include "utilities/growableArray.hpp"
class ShenandoahBarrierSetC2State : public ResourceObj {
class ShenandoahBarrierSetC2State : public ArenaObj {
private:
GrowableArray<ShenandoahIUBarrierNode*>* _iu_barriers;
GrowableArray<ShenandoahLoadReferenceBarrierNode*>* _load_reference_barriers;

View File

@ -44,7 +44,7 @@ ShenandoahCollectorPolicy::ShenandoahCollectorPolicy() :
Copy::zero_to_bytes(_degen_points, sizeof(size_t) * ShenandoahGC::_DEGENERATED_LIMIT);
_tracer = new (ResourceObj::C_HEAP, mtGC) ShenandoahTracer();
_tracer = new ShenandoahTracer();
}

View File

@ -31,7 +31,7 @@
#include "memory/allocation.hpp"
#include "utilities/ostream.hpp"
class ShenandoahTracer : public GCTracer {
class ShenandoahTracer : public GCTracer, public CHeapObj<mtGC> {
public:
ShenandoahTracer() : GCTracer(Shenandoah) {}
};

View File

@ -479,7 +479,7 @@ ShenandoahHeap::ShenandoahHeap(ShenandoahCollectorPolicy* policy) :
_memory_pool(NULL),
_stw_memory_manager("Shenandoah Pauses", "end of GC pause"),
_cycle_memory_manager("Shenandoah Cycles", "end of GC cycle"),
_gc_timer(new (ResourceObj::C_HEAP, mtGC) ConcurrentGCTimer()),
_gc_timer(new ConcurrentGCTimer()),
_soft_ref_policy(),
_log_min_obj_alignment_in_bytes(LogMinObjAlignmentInBytes),
_ref_processor(new ShenandoahReferenceProcessor(MAX2(_max_workers, 1U))),

View File

@ -44,7 +44,7 @@
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
class ZBarrierSetC2State : public ResourceObj {
class ZBarrierSetC2State : public ArenaObj {
private:
GrowableArray<ZLoadBarrierStubC2*>* _stubs;
Node_Array _live;

View File

@ -35,7 +35,7 @@ const uint8_t ZLoadBarrierWeak = 2;
const uint8_t ZLoadBarrierPhantom = 4;
const uint8_t ZLoadBarrierNoKeepalive = 8;
class ZLoadBarrierStubC2 : public ResourceObj {
class ZLoadBarrierStubC2 : public ArenaObj {
private:
const MachNode* _node;
const Address _ref_addr;

View File

@ -92,7 +92,7 @@ ZTracer::ZTracer() :
void ZTracer::initialize() {
assert(_tracer == NULL, "Already initialized");
_tracer = new (ResourceObj::C_HEAP, mtGC) ZTracer();
_tracer = new ZTracer();
JFR_ONLY(register_jfr_type_serializers());
}

View File

@ -30,7 +30,7 @@ class ZStatCounter;
class ZStatPhase;
class ZStatSampler;
class ZTracer : public GCTracer {
class ZTracer : public GCTracer, public CHeapObj<mtGC> {
private:
static ZTracer* _tracer;

View File

@ -1279,8 +1279,8 @@ void SignatureHandlerLibrary::initialize() {
SignatureHandlerLibrary::buffer_size);
_buffer = bb->code_begin();
_fingerprints = new(ResourceObj::C_HEAP, mtCode)GrowableArray<uint64_t>(32, mtCode);
_handlers = new(ResourceObj::C_HEAP, mtCode)GrowableArray<address>(32, mtCode);
_fingerprints = new (mtCode) GrowableArray<uint64_t>(32, mtCode);
_handlers = new (mtCode) GrowableArray<address>(32, mtCode);
}
address SignatureHandlerLibrary::set_handler(CodeBuffer* buffer) {

View File

@ -284,7 +284,7 @@ static const int initial_size = 64;
static int save(const StoredEdge* edge) {
assert(edge != nullptr, "invariant");
if (_leak_context_edges == nullptr) {
_leak_context_edges = new (ResourceObj::C_HEAP, mtTracing)GrowableArray<const StoredEdge*>(initial_size, mtTracing);
_leak_context_edges = new (mtTracing) GrowableArray<const StoredEdge*>(initial_size, mtTracing);
_leak_context_edges->append(nullptr); // next idx now at 1, for disambiguation in markword.
}
return _leak_context_edges->append(edge);

View File

@ -52,7 +52,7 @@ const int initial_array_size = 64;
template <typename T>
static GrowableArray<T>* c_heap_allocate_array(int size = initial_array_size) {
return new (ResourceObj::C_HEAP, mtTracing) GrowableArray<T>(size, mtTracing);
return new (mtTracing) GrowableArray<T>(size, mtTracing);
}
static GrowableArray<traceid>* unloaded_thread_id_set = NULL;

View File

@ -269,7 +269,7 @@ bool ReferenceToThreadRootClosure::do_thread_stack_detailed(JavaThread* jt) {
return true;
}
GrowableArray<jvmtiDeferredLocalVariableSet*>* const list = JvmtiDeferredUpdates::deferred_locals(jt);
GrowableArrayView<jvmtiDeferredLocalVariableSet*>* const list = JvmtiDeferredUpdates::deferred_locals(jt);
if (list != NULL) {
for (int i = 0; i < list->length(); i++) {
list->at(i)->oops_do(&rcl);

View File

@ -77,7 +77,7 @@ static InterfaceEntry& new_entry(const NetworkInterface* iface, GrowableArray<In
static GrowableArray<InterfaceEntry>* get_interfaces() {
if (_interfaces == NULL) {
_interfaces = new(ResourceObj::C_HEAP, mtTracing) GrowableArray<InterfaceEntry>(10, mtTracing);
_interfaces = new (mtTracing) GrowableArray<InterfaceEntry>(10, mtTracing);
}
return _interfaces;
}

View File

@ -263,7 +263,7 @@ void JfrThreadGroup::JfrThreadGroupEntry::set_thread_group(JfrThreadGroupPointer
}
JfrThreadGroup::JfrThreadGroup() :
_list(new (ResourceObj::C_HEAP, mtTracing) GrowableArray<JfrThreadGroupEntry*>(initial_array_size, mtTracing)) {}
_list(new (mtTracing) GrowableArray<JfrThreadGroupEntry*>(initial_array_size, mtTracing)) {}
JfrThreadGroup::~JfrThreadGroup() {
if (_list != NULL) {

View File

@ -134,10 +134,10 @@ static bool validate_recording_options(TRAPS) {
const int length = options->length();
assert(length >= 1, "invariant");
assert(dcmd_recordings_array == NULL, "invariant");
dcmd_recordings_array = new (ResourceObj::C_HEAP, mtTracing)GrowableArray<JfrStartFlightRecordingDCmd*>(length, mtTracing);
dcmd_recordings_array = new (mtTracing) GrowableArray<JfrStartFlightRecordingDCmd*>(length, mtTracing);
assert(dcmd_recordings_array != NULL, "invariant");
for (int i = 0; i < length; ++i) {
JfrStartFlightRecordingDCmd* const dcmd_recording = new(ResourceObj::C_HEAP, mtTracing) JfrStartFlightRecordingDCmd(tty, true);
JfrStartFlightRecordingDCmd* const dcmd_recording = new (mtTracing) JfrStartFlightRecordingDCmd(tty, true);
assert(dcmd_recording != NULL, "invariant");
dcmd_recordings_array->append(dcmd_recording);
if (!parse_recording_options(options->at(i), dcmd_recording, THREAD)) {

View File

@ -328,7 +328,7 @@ RepositoryIterator::RepositoryIterator(const char* repository_path) :
if (_path_buffer_file_name_offset == -1) {
return;
}
_file_names = new (ResourceObj::C_HEAP, mtTracing) GrowableArray<const char*>(10, mtTracing);
_file_names = new (mtTracing) GrowableArray<const char*>(10, mtTracing);
if (_file_names == NULL) {
log_error(jfr, system)("Unable to malloc memory during jfr emergency dump");
return;

View File

@ -769,7 +769,7 @@ bool JfrOptionSet::parse_start_flight_recording_option(const JavaVMOption** opti
const size_t value_length = strlen(value);
if (start_flight_recording_options_array == NULL) {
start_flight_recording_options_array = new (ResourceObj::C_HEAP, mtTracing) GrowableArray<const char*>(8, mtTracing);
start_flight_recording_options_array = new (mtTracing) GrowableArray<const char*>(8, mtTracing);
}
assert(start_flight_recording_options_array != NULL, "invariant");
char* const startup_value = NEW_C_HEAP_ARRAY(char, value_length + 1, mtTracing);

View File

@ -58,7 +58,7 @@ bool JfrFullStorage<ValueType, NodeType, AllocPolicy>::initialize(size_t free_li
return false;
}
for (size_t i = 0; i < free_list_prealloc_count; ++i) {
NodePtr node = new (ResourceObj::C_HEAP, mtTracing) Node();
NodePtr node = new Node();
if (node == NULL) {
return false;
}
@ -83,7 +83,7 @@ template <typename ValueType, template <typename> class NodeType, typename Alloc
inline typename JfrFullStorage<ValueType, NodeType, AllocPolicy>::NodePtr
JfrFullStorage<ValueType, NodeType, AllocPolicy>::acquire() {
NodePtr node = _free_node_list->remove();
return node != NULL ? node : new (ResourceObj::C_HEAP, mtTracing) Node();
return node != NULL ? node : new Node();
}
template <typename ValueType, template <typename> class NodeType, typename AllocPolicy>

View File

@ -51,7 +51,7 @@ static const int initial_array_size = 64;
template <typename T>
static GrowableArray<T>* c_heap_allocate_array(int size = initial_array_size) {
return new (ResourceObj::C_HEAP, mtTracing) GrowableArray<T>(size, mtTracing);
return new (mtTracing) GrowableArray<T>(size, mtTracing);
}
static bool initialize(TRAPS) {

View File

@ -36,7 +36,7 @@ static const int initial_array_size = 64;
template <typename T>
static GrowableArray<T>* c_heap_allocate_array(int size = initial_array_size) {
return new (ResourceObj::C_HEAP, mtTracing) GrowableArray<T>(size, mtTracing);
return new (mtTracing) GrowableArray<T>(size, mtTracing);
}
// Track the set of unloaded klasses during a chunk / epoch.

View File

@ -64,7 +64,7 @@ inline Node* unmask(const Node* ptr) {
}
template <typename Derived, typename Version = traceid>
class JfrLinkedNode : public ResourceObj {
class JfrLinkedNode {
public:
typedef Version VersionType;
Derived* _next;
@ -86,7 +86,7 @@ class JfrKeyIsThisNode : public JfrLinkedNode<JfrKeyIsThisNode<V> > {
};
template <typename V>
class JfrValueNode : public JfrLinkedNode<JfrValueNode<V> > {
class JfrValueNode : public JfrLinkedNode<JfrValueNode<V> >, public CHeapObj<mtTracing> {
private:
V _value;
public:

Some files were not shown because too many files have changed in this diff Show More