commit 83a8157179
Author: Erik Helin
Date:   2014-02-17 10:13:28 +01:00

19 changed files with 171 additions and 241 deletions


@@ -1704,8 +1704,8 @@ CompactibleFreeListSpace::returnChunkToDictionary(FreeChunk* chunk) {
 _dictionary->return_chunk(chunk);
 #ifndef PRODUCT
 if (CMSCollector::abstract_state() != CMSCollector::Sweeping) {
-TreeChunk<FreeChunk, AdaptiveFreeList>* tc = TreeChunk<FreeChunk, AdaptiveFreeList>::as_TreeChunk(chunk);
-TreeList<FreeChunk, AdaptiveFreeList>* tl = tc->list();
+TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >* tc = TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >::as_TreeChunk(chunk);
+TreeList<FreeChunk, AdaptiveFreeList<FreeChunk> >* tl = tc->list();
 tl->verify_stats();
 }
 #endif // PRODUCT
@@ -2515,10 +2515,10 @@ void CompactibleFreeListSpace::verifyIndexedFreeList(size_t size) const {
 #ifndef PRODUCT
 void CompactibleFreeListSpace::check_free_list_consistency() const {
-assert((TreeChunk<FreeChunk, AdaptiveFreeList>::min_size() <= IndexSetSize),
+assert((TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >::min_size() <= IndexSetSize),
 "Some sizes can't be allocated without recourse to"
 " linear allocation buffers");
-assert((TreeChunk<FreeChunk, AdaptiveFreeList>::min_size()*HeapWordSize == sizeof(TreeChunk<FreeChunk, AdaptiveFreeList>)),
+assert((TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >::min_size()*HeapWordSize == sizeof(TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >)),
 "else MIN_TREE_CHUNK_SIZE is wrong");
 assert(IndexSetStart != 0, "IndexSetStart not initialized");
 assert(IndexSetStride != 0, "IndexSetStride not initialized");


@@ -3035,7 +3035,6 @@ void CMSCollector::verify_after_remark_work_1() {
 true, // activate StrongRootsScope
 SharedHeap::ScanningOption(roots_scanning_options()),
 &notOlder,
-true, // walk code active on stacks
 NULL,
 NULL); // SSS: Provide correct closure
@@ -3102,7 +3101,6 @@ void CMSCollector::verify_after_remark_work_2() {
 true, // activate StrongRootsScope
 SharedHeap::ScanningOption(roots_scanning_options()),
 &notOlder,
-true, // walk code active on stacks
 NULL,
 &klass_closure);
@@ -3680,12 +3678,6 @@ void CMSCollector::checkpointRootsInitialWork(bool asynch) {
 ResourceMark rm;
 HandleMark hm;
-FalseClosure falseClosure;
-// In the case of a synchronous collection, we will elide the
-// remark step, so it's important to catch all the nmethod oops
-// in this step.
-// The final 'true' flag to gen_process_strong_roots will ensure this.
-// If 'async' is true, we can relax the nmethod tracing.
 MarkRefsIntoClosure notOlder(_span, &_markBitMap);
 GenCollectedHeap* gch = GenCollectedHeap::heap();
@@ -3738,7 +3730,6 @@ void CMSCollector::checkpointRootsInitialWork(bool asynch) {
 true, // activate StrongRootsScope
 SharedHeap::ScanningOption(roots_scanning_options()),
 &notOlder,
-true, // walk all of code cache if (so & SO_AllCodeCache)
 NULL,
 &klass_closure);
 }
@@ -5237,7 +5228,6 @@ void CMSParInitialMarkTask::work(uint worker_id) {
 false, // this is parallel code
 SharedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
 &par_mri_cl,
-true, // walk all of code cache if (so & SO_AllCodeCache)
 NULL,
 &klass_closure);
 assert(_collector->should_unload_classes()
@@ -5373,7 +5363,6 @@ void CMSParRemarkTask::work(uint worker_id) {
 false, // this is parallel code
 SharedHeap::ScanningOption(_collector->CMSCollector::roots_scanning_options()),
 &par_mrias_cl,
-true, // walk all of code cache if (so & SO_AllCodeCache)
 NULL,
 NULL); // The dirty klasses will be handled below
 assert(_collector->should_unload_classes()
@@ -5963,7 +5952,6 @@ void CMSCollector::do_remark_non_parallel() {
 false, // use the local StrongRootsScope
 SharedHeap::ScanningOption(roots_scanning_options()),
 &mrias_cl,
-true, // walk code active on stacks
 NULL,
 NULL); // The dirty klasses will be handled below


@@ -1383,13 +1383,6 @@ class ASConcurrentMarkSweepGeneration : public ConcurrentMarkSweepGeneration {
 // Closures of various sorts used by CMS to accomplish its work
 //
-// This closure is used to check that a certain set of oops is empty.
-class FalseClosure: public OopClosure {
-public:
-void do_oop(oop* p) { guarantee(false, "Should be an empty set"); }
-void do_oop(narrowOop* p) { guarantee(false, "Should be an empty set"); }
-};
 // This closure is used to do concurrent marking from the roots
 // following the first checkpoint.
 class MarkFromRootsClosure: public BitMapClosure {


@@ -3394,13 +3394,12 @@ void G1CollectedHeap::verify(bool silent, VerifyOption vo) {
 if (!silent) { gclog_or_tty->print("Roots "); }
 VerifyRootsClosure rootsCl(vo);
-G1VerifyCodeRootOopClosure codeRootsCl(this, &rootsCl, vo);
-G1VerifyCodeRootBlobClosure blobsCl(&codeRootsCl);
 VerifyKlassClosure klassCl(this, &rootsCl);
 // We apply the relevant closures to all the oops in the
-// system dictionary, the string table and the code cache.
-const int so = SO_AllClasses | SO_Strings | SO_AllCodeCache;
+// system dictionary, class loader data graph and the string table.
+// Don't verify the code cache here, since it's verified below.
+const int so = SO_AllClasses | SO_Strings;
 // Need cleared claim bits for the strong roots processing
 ClassLoaderDataGraph::clear_claimed_marks();
@@ -3408,10 +3407,14 @@ void G1CollectedHeap::verify(bool silent, VerifyOption vo) {
 process_strong_roots(true, // activate StrongRootsScope
 ScanningOption(so), // roots scanning options
 &rootsCl,
-&blobsCl,
 &klassCl
 );
+// Verify the nmethods in the code cache.
+G1VerifyCodeRootOopClosure codeRootsCl(this, &rootsCl, vo);
+G1VerifyCodeRootBlobClosure blobsCl(&codeRootsCl);
+CodeCache::blobs_do(&blobsCl);
 bool failures = rootsCl.failures() || codeRootsCl.failures();
 if (vo != VerifyOption_G1UseMarkWord) {
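The verification flow above changes shape: process_strong_roots() no longer receives a code-blob closure, and the code cache is instead verified explicitly afterwards via CodeCache::blobs_do(&blobsCl). A minimal sketch of that wrapper-and-walk pattern follows; the types and the verify_everything() function are hypothetical stand-ins for illustration, not G1's actual classes.

    #include <cstddef>
    #include <vector>

    struct oop {};
    struct OopClosure { virtual void do_oop(oop* p) = 0; virtual ~OopClosure() {} };
    struct CodeBlob { std::vector<oop> embedded_oops; };

    // Plays the role of G1VerifyCodeRootBlobClosure: forwards every oop embedded
    // in a code blob to the same oop verifier used for the other roots.
    struct BlobToOopClosure {
      OopClosure* _cl;
      explicit BlobToOopClosure(OopClosure* cl) : _cl(cl) {}
      void do_code_blob(CodeBlob* cb) {
        for (size_t i = 0; i < cb->embedded_oops.size(); i++) {
          _cl->do_oop(&cb->embedded_oops[i]);
        }
      }
    };

    struct VerifyRoots : OopClosure {
      bool failures;
      VerifyRoots() : failures(false) {}
      void do_oop(oop* p) { /* check that *p is a valid heap reference; record failures */ }
    };

    void verify_everything(std::vector<CodeBlob>& code_cache) {
      VerifyRoots rootsCl;
      // 1) strong roots would be processed here without any code-blob closure ...
      // 2) ... then the code cache is walked separately, standing in for
      //    CodeCache::blobs_do(&blobsCl):
      BlobToOopClosure blobsCl(&rootsCl);
      for (size_t i = 0; i < code_cache.size(); i++) {
        blobsCl.do_code_blob(&code_cache[i]);
      }
    }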
@@ -5115,12 +5118,9 @@ g1_process_strong_roots(bool is_scavenging,
 BufferingOopClosure buf_scan_non_heap_roots(scan_non_heap_roots);
-CodeBlobToOopClosure scan_code_roots(&buf_scan_non_heap_roots, true /* do_marking */);
 process_strong_roots(false, // no scoping; this is parallel code
 so,
 &buf_scan_non_heap_roots,
-&scan_code_roots,
 scan_klasses
 );
@@ -5180,12 +5180,6 @@ g1_process_strong_roots(bool is_scavenging,
 _process_strong_tasks->all_tasks_completed();
 }
-void
-G1CollectedHeap::g1_process_weak_roots(OopClosure* root_closure) {
-CodeBlobToOopClosure roots_in_blobs(root_closure, /*do_marking=*/ false);
-SharedHeap::process_weak_roots(root_closure, &roots_in_blobs);
-}
 class G1StringSymbolTableUnlinkTask : public AbstractGangTask {
 private:
 BoolObjectClosure* _is_alive;


@@ -833,11 +833,6 @@ protected:
 G1KlassScanClosure* scan_klasses,
 int worker_i);
-// Apply "blk" to all the weak roots of the system. These include
-// JNI weak roots, the code cache, system dictionary, symbol table,
-// string table, and referents of reachable weak refs.
-void g1_process_weak_roots(OopClosure* root_closure);
 // Frees a non-humongous region by initializing its contents and
 // adding it to the free list that's passed as a parameter (this is
 // usually a local list which will be appended to the master free


@@ -133,7 +133,6 @@ void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
 sh->process_strong_roots(true, // activate StrongRootsScope
 SharedHeap::SO_SystemClasses,
 &GenMarkSweep::follow_root_closure,
-&GenMarkSweep::follow_code_root_closure,
 &GenMarkSweep::follow_klass_closure);
 // Process reference objects found during marking
@@ -307,9 +306,8 @@ void G1MarkSweep::mark_sweep_phase3() {
 ClassLoaderDataGraph::clear_claimed_marks();
 sh->process_strong_roots(true, // activate StrongRootsScope
-SharedHeap::SO_AllClasses,
+SharedHeap::SO_AllClasses | SharedHeap::SO_Strings | SharedHeap::SO_AllCodeCache,
 &GenMarkSweep::adjust_pointer_closure,
-NULL, // do not touch code cache here
 &GenMarkSweep::adjust_klass_closure);
 assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
@@ -317,7 +315,7 @@ void G1MarkSweep::mark_sweep_phase3() {
 // Now adjust pointers in remaining weak roots. (All of which should
 // have been cleared if they pointed to non-surviving objects.)
-g1h->g1_process_weak_roots(&GenMarkSweep::adjust_pointer_closure);
+sh->process_weak_roots(&GenMarkSweep::adjust_pointer_closure);
 GenMarkSweep::adjust_marks();


@@ -621,7 +621,6 @@ void ParNewGenTask::work(uint worker_id) {
 false, // no scope; this is parallel code
 SharedHeap::ScanningOption(so),
 &par_scan_state.to_space_root_closure(),
-true, // walk *all* scavengable nmethods
 &par_scan_state.older_gen_closure(),
 &klass_scan_closure);
 par_scan_state.end_strong_roots();


@@ -47,7 +47,6 @@ STWGCTimer* MarkSweep::_gc_timer = NULL;
 SerialOldTracer* MarkSweep::_gc_tracer = NULL;
 MarkSweep::FollowRootClosure MarkSweep::follow_root_closure;
-CodeBlobToOopClosure MarkSweep::follow_code_root_closure(&MarkSweep::follow_root_closure, /*do_marking=*/ true);
 void MarkSweep::FollowRootClosure::do_oop(oop* p) { follow_root(p); }
 void MarkSweep::FollowRootClosure::do_oop(narrowOop* p) { follow_root(p); }


@@ -143,7 +143,6 @@ class MarkSweep : AllStatic {
 // Public closures
 static IsAliveClosure is_alive;
 static FollowRootClosure follow_root_closure;
-static CodeBlobToOopClosure follow_code_root_closure; // => follow_root_closure
 static MarkAndPushClosure mark_and_push_closure;
 static FollowKlassClosure follow_klass_closure;
 static FollowStackClosure follow_stack_closure;


@@ -44,16 +44,16 @@
 // This is currently used in the Concurrent Mark&Sweep implementation.
 ////////////////////////////////////////////////////////////////////////////////
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t TreeChunk<Chunk_t, FreeList_t>::_min_tree_chunk_size = sizeof(TreeChunk<Chunk_t, FreeList_t>)/HeapWordSize;
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeChunk<Chunk_t, FreeList_t>* TreeChunk<Chunk_t, FreeList_t>::as_TreeChunk(Chunk_t* fc) {
 // Do some assertion checking here.
 return (TreeChunk<Chunk_t, FreeList_t>*) fc;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void TreeChunk<Chunk_t, FreeList_t>::verify_tree_chunk_list() const {
 TreeChunk<Chunk_t, FreeList_t>* nextTC = (TreeChunk<Chunk_t, FreeList_t>*)next();
 if (prev() != NULL) { // interior list node shouldn't have tree fields
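The signature change that recurs throughout this patch is visible above: TreeChunk, TreeList and BinaryTreeDictionary stop taking a template template parameter and take an ordinary type parameter instead. A minimal, self-contained sketch of the difference follows; OldDictionary, NewDictionary and the stand-in AdaptiveFreeList/FreeChunk types are hypothetical illustration names, not HotSpot's classes.

    #include <cstddef>

    struct FreeChunk { size_t size; };

    template <class T> class AdaptiveFreeList { /* stand-in free list */ };

    // Old form: FreeList_t is a template template parameter, so callers pass the
    // bare template name and the class instantiates FreeList_t<Chunk_t> itself.
    template <class Chunk_t, template <class> class FreeList_t>
    class OldDictionary : public FreeList_t<Chunk_t> { };

    // New form: FreeList_t is an ordinary type parameter, so callers must pass a
    // fully instantiated type such as AdaptiveFreeList<FreeChunk>.
    template <class Chunk_t, class FreeList_t>
    class NewDictionary : public FreeList_t { };

    // Usage mirrors the substitutions made throughout this diff
    // (note the C++03-style "> >" spacing kept by the patch).
    OldDictionary<FreeChunk, AdaptiveFreeList>             old_dict;
    NewDictionary<FreeChunk, AdaptiveFreeList<FreeChunk> > new_dict;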
@@ -67,11 +67,11 @@ void TreeChunk<Chunk_t, FreeList_t>::verify_tree_chunk_list() const {
 }
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeList<Chunk_t, FreeList_t>::TreeList() : _parent(NULL),
 _left(NULL), _right(NULL) {}
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeList<Chunk_t, FreeList_t>*
 TreeList<Chunk_t, FreeList_t>::as_TreeList(TreeChunk<Chunk_t,FreeList_t>* tc) {
 // This first free chunk in the list will be the tree list.
@@ -88,20 +88,7 @@ TreeList<Chunk_t, FreeList_t>::as_TreeList(TreeChunk<Chunk_t,FreeList_t>* tc) {
 return tl;
 }
-template <class Chunk_t, template <class> class FreeList_t>
-TreeList<Chunk_t, FreeList_t>*
-get_chunk(size_t size, enum FreeBlockDictionary<Chunk_t>::Dither dither) {
-FreeBlockDictionary<Chunk_t>::verify_par_locked();
-Chunk_t* res = get_chunk_from_tree(size, dither);
-assert(res == NULL || res->is_free(),
-"Should be returning a free chunk");
-assert(dither != FreeBlockDictionary<Chunk_t>::exactly ||
-res->size() == size, "Not correct size");
-return res;
-}
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeList<Chunk_t, FreeList_t>*
 TreeList<Chunk_t, FreeList_t>::as_TreeList(HeapWord* addr, size_t size) {
 TreeChunk<Chunk_t, FreeList_t>* tc = (TreeChunk<Chunk_t, FreeList_t>*) addr;
@@ -125,17 +112,17 @@ TreeList<Chunk_t, FreeList_t>::as_TreeList(HeapWord* addr, size_t size) {
 // an over populated size. The general get_better_list() just returns
 // the current list.
 template <>
-TreeList<FreeChunk, AdaptiveFreeList>*
-TreeList<FreeChunk, AdaptiveFreeList>::get_better_list(
-BinaryTreeDictionary<FreeChunk, ::AdaptiveFreeList>* dictionary) {
+TreeList<FreeChunk, AdaptiveFreeList<FreeChunk> >*
+TreeList<FreeChunk, AdaptiveFreeList<FreeChunk> >::get_better_list(
+BinaryTreeDictionary<FreeChunk, ::AdaptiveFreeList<FreeChunk> >* dictionary) {
 // A candidate chunk has been found. If it is already under
 // populated, get a chunk associated with the hint for this
 // chunk.
-TreeList<FreeChunk, ::AdaptiveFreeList>* curTL = this;
+TreeList<FreeChunk, ::AdaptiveFreeList<FreeChunk> >* curTL = this;
 if (surplus() <= 0) {
 /* Use the hint to find a size with a surplus, and reset the hint. */
-TreeList<FreeChunk, ::AdaptiveFreeList>* hintTL = this;
+TreeList<FreeChunk, ::AdaptiveFreeList<FreeChunk> >* hintTL = this;
 while (hintTL->hint() != 0) {
 assert(hintTL->hint() > hintTL->size(),
 "hint points in the wrong direction");
@@ -163,14 +150,14 @@ TreeList<FreeChunk, AdaptiveFreeList>::get_better_list(
 }
 #endif // INCLUDE_ALL_GCS
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeList<Chunk_t, FreeList_t>*
 TreeList<Chunk_t, FreeList_t>::get_better_list(
 BinaryTreeDictionary<Chunk_t, FreeList_t>* dictionary) {
 return this;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeList<Chunk_t, FreeList_t>* TreeList<Chunk_t, FreeList_t>::remove_chunk_replace_if_needed(TreeChunk<Chunk_t, FreeList_t>* tc) {
 TreeList<Chunk_t, FreeList_t>* retTL = this;
@@ -286,7 +273,7 @@ TreeList<Chunk_t, FreeList_t>* TreeList<Chunk_t, FreeList_t>::remove_chunk_repla
 return retTL;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void TreeList<Chunk_t, FreeList_t>::return_chunk_at_tail(TreeChunk<Chunk_t, FreeList_t>* chunk) {
 assert(chunk != NULL, "returning NULL chunk");
 assert(chunk->list() == this, "list should be set for chunk");
@@ -301,7 +288,7 @@ void TreeList<Chunk_t, FreeList_t>::return_chunk_at_tail(TreeChunk<Chunk_t, Free
 this->link_tail(chunk);
 assert(!tail() || size() == tail()->size(), "Wrong sized chunk in list");
-FreeList_t<Chunk_t>::increment_count();
+FreeList_t::increment_count();
 debug_only(this->increment_returned_bytes_by(chunk->size()*sizeof(HeapWord));)
 assert(head() == NULL || head()->prev() == NULL, "list invariant");
 assert(tail() == NULL || tail()->next() == NULL, "list invariant");
@@ -311,7 +298,7 @@ void TreeList<Chunk_t, FreeList_t>::return_chunk_at_tail(TreeChunk<Chunk_t, Free
 // is defined to be after the chunk pointer to by head(). This is
 // because the TreeList<Chunk_t, FreeList_t> is embedded in the first TreeChunk<Chunk_t, FreeList_t> in the
 // list. See the definition of TreeChunk<Chunk_t, FreeList_t>.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void TreeList<Chunk_t, FreeList_t>::return_chunk_at_head(TreeChunk<Chunk_t, FreeList_t>* chunk) {
 assert(chunk->list() == this, "list should be set for chunk");
 assert(head() != NULL, "The tree list is embedded in the first chunk");
@@ -329,13 +316,13 @@ void TreeList<Chunk_t, FreeList_t>::return_chunk_at_head(TreeChunk<Chunk_t, Free
 }
 head()->link_after(chunk);
 assert(!head() || size() == head()->size(), "Wrong sized chunk in list");
-FreeList_t<Chunk_t>::increment_count();
+FreeList_t::increment_count();
 debug_only(this->increment_returned_bytes_by(chunk->size()*sizeof(HeapWord));)
 assert(head() == NULL || head()->prev() == NULL, "list invariant");
 assert(tail() == NULL || tail()->next() == NULL, "list invariant");
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void TreeChunk<Chunk_t, FreeList_t>::assert_is_mangled() const {
 assert((ZapUnusedHeapArea &&
 SpaceMangler::is_mangled((HeapWord*) Chunk_t::size_addr()) &&
@@ -345,14 +332,14 @@ void TreeChunk<Chunk_t, FreeList_t>::assert_is_mangled() const {
 "Space should be clear or mangled");
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeChunk<Chunk_t, FreeList_t>* TreeList<Chunk_t, FreeList_t>::head_as_TreeChunk() {
 assert(head() == NULL || (TreeChunk<Chunk_t, FreeList_t>::as_TreeChunk(head())->list() == this),
 "Wrong type of chunk?");
 return TreeChunk<Chunk_t, FreeList_t>::as_TreeChunk(head());
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeChunk<Chunk_t, FreeList_t>* TreeList<Chunk_t, FreeList_t>::first_available() {
 assert(head() != NULL, "The head of the list cannot be NULL");
 Chunk_t* fc = head()->next();
@@ -369,7 +356,7 @@ TreeChunk<Chunk_t, FreeList_t>* TreeList<Chunk_t, FreeList_t>::first_available()
 // Returns the block with the largest heap address amongst
 // those in the list for this size; potentially slow and expensive,
 // use with caution!
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeChunk<Chunk_t, FreeList_t>* TreeList<Chunk_t, FreeList_t>::largest_address() {
 assert(head() != NULL, "The head of the list cannot be NULL");
 Chunk_t* fc = head()->next();
@@ -392,7 +379,7 @@ TreeChunk<Chunk_t, FreeList_t>* TreeList<Chunk_t, FreeList_t>::largest_address()
 return retTC;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 BinaryTreeDictionary<Chunk_t, FreeList_t>::BinaryTreeDictionary(MemRegion mr) {
 assert((mr.byte_size() > min_size()), "minimum chunk size");
@@ -405,17 +392,17 @@ BinaryTreeDictionary<Chunk_t, FreeList_t>::BinaryTreeDictionary(MemRegion mr) {
 assert(total_free_blocks() == 1, "reset check failed");
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::inc_total_size(size_t inc) {
 _total_size = _total_size + inc;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::dec_total_size(size_t dec) {
 _total_size = _total_size - dec;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::reset(MemRegion mr) {
 assert((mr.byte_size() > min_size()), "minimum chunk size");
 set_root(TreeList<Chunk_t, FreeList_t>::as_TreeList(mr.start(), mr.word_size()));
@@ -423,13 +410,13 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::reset(MemRegion mr) {
 set_total_free_blocks(1);
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::reset(HeapWord* addr, size_t byte_size) {
 MemRegion mr(addr, heap_word_size(byte_size));
 reset(mr);
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::reset() {
 set_root(NULL);
 set_total_size(0);
@@ -437,7 +424,7 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::reset() {
 }
 // Get a free block of size at least size from tree, or NULL.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeChunk<Chunk_t, FreeList_t>*
 BinaryTreeDictionary<Chunk_t, FreeList_t>::get_chunk_from_tree(
 size_t size,
@@ -496,7 +483,7 @@ BinaryTreeDictionary<Chunk_t, FreeList_t>::get_chunk_from_tree(
 return retTC;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeList<Chunk_t, FreeList_t>* BinaryTreeDictionary<Chunk_t, FreeList_t>::find_list(size_t size) const {
 TreeList<Chunk_t, FreeList_t>* curTL;
 for (curTL = root(); curTL != NULL;) {
@@ -515,7 +502,7 @@ TreeList<Chunk_t, FreeList_t>* BinaryTreeDictionary<Chunk_t, FreeList_t>::find_l
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 bool BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_chunk_in_free_list(Chunk_t* tc) const {
 size_t size = tc->size();
 TreeList<Chunk_t, FreeList_t>* tl = find_list(size);
@@ -526,7 +513,7 @@ bool BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_chunk_in_free_list(Chunk_
 }
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 Chunk_t* BinaryTreeDictionary<Chunk_t, FreeList_t>::find_largest_dict() const {
 TreeList<Chunk_t, FreeList_t> *curTL = root();
 if (curTL != NULL) {
@@ -541,7 +528,7 @@ Chunk_t* BinaryTreeDictionary<Chunk_t, FreeList_t>::find_largest_dict() const {
 // chunk in a list on a tree node, just unlink it.
 // If it is the last chunk in the list (the next link is NULL),
 // remove the node and repair the tree.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeChunk<Chunk_t, FreeList_t>*
 BinaryTreeDictionary<Chunk_t, FreeList_t>::remove_chunk_from_tree(TreeChunk<Chunk_t, FreeList_t>* tc) {
 assert(tc != NULL, "Should not call with a NULL chunk");
@@ -682,7 +669,7 @@ BinaryTreeDictionary<Chunk_t, FreeList_t>::remove_chunk_from_tree(TreeChunk<Chun
 // Remove the leftmost node (lm) in the tree and return it.
 // If lm has a right child, link it to the left node of
 // the parent of lm.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 TreeList<Chunk_t, FreeList_t>* BinaryTreeDictionary<Chunk_t, FreeList_t>::remove_tree_minimum(TreeList<Chunk_t, FreeList_t>* tl) {
 assert(tl != NULL && tl->parent() != NULL, "really need a proper sub-tree");
 // locate the subtree minimum by walking down left branches
@@ -717,7 +704,7 @@ TreeList<Chunk_t, FreeList_t>* BinaryTreeDictionary<Chunk_t, FreeList_t>::remove
 return curTL;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::insert_chunk_in_tree(Chunk_t* fc) {
 TreeList<Chunk_t, FreeList_t> *curTL, *prevTL;
 size_t size = fc->size();
@@ -783,7 +770,7 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::insert_chunk_in_tree(Chunk_t* fc
 }
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::max_chunk_size() const {
 FreeBlockDictionary<Chunk_t>::verify_par_locked();
 TreeList<Chunk_t, FreeList_t>* tc = root();
@@ -792,7 +779,7 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::max_chunk_size() const {
 return tc->size();
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_list_length(TreeList<Chunk_t, FreeList_t>* tl) const {
 size_t res;
 res = tl->count();
@@ -805,7 +792,7 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_list_length(TreeList<Chu
 return res;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_size_in_tree(TreeList<Chunk_t, FreeList_t>* tl) const {
 if (tl == NULL)
 return 0;
@@ -814,7 +801,7 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_size_in_tree(TreeList<Ch
 total_size_in_tree(tl->right());
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 double BinaryTreeDictionary<Chunk_t, FreeList_t>::sum_of_squared_block_sizes(TreeList<Chunk_t, FreeList_t>* const tl) const {
 if (tl == NULL) {
 return 0.0;
@@ -826,7 +813,7 @@ double BinaryTreeDictionary<Chunk_t, FreeList_t>::sum_of_squared_block_sizes(Tre
 return curr;
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_free_blocks_in_tree(TreeList<Chunk_t, FreeList_t>* tl) const {
 if (tl == NULL)
 return 0;
@@ -835,14 +822,14 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_free_blocks_in_tree(Tree
 total_free_blocks_in_tree(tl->right());
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::num_free_blocks() const {
 assert(total_free_blocks_in_tree(root()) == total_free_blocks(),
 "_total_free_blocks inconsistency");
 return total_free_blocks();
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::tree_height_helper(TreeList<Chunk_t, FreeList_t>* tl) const {
 if (tl == NULL)
 return 0;
@@ -850,12 +837,12 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::tree_height_helper(TreeList<Ch
 tree_height_helper(tl->right()));
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::tree_height() const {
 return tree_height_helper(root());
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_nodes_helper(TreeList<Chunk_t, FreeList_t>* tl) const {
 if (tl == NULL) {
 return 0;
@@ -864,18 +851,18 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_nodes_helper(TreeList<Ch
 total_nodes_helper(tl->right());
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_nodes_in_tree(TreeList<Chunk_t, FreeList_t>* tl) const {
 return total_nodes_helper(root());
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::dict_census_update(size_t size, bool split, bool birth){}
 #if INCLUDE_ALL_GCS
 template <>
 void AFLBinaryTreeDictionary::dict_census_update(size_t size, bool split, bool birth) {
-TreeList<FreeChunk, AdaptiveFreeList>* nd = find_list(size);
+TreeList<FreeChunk, AdaptiveFreeList<FreeChunk> >* nd = find_list(size);
 if (nd) {
 if (split) {
 if (birth) {
@@ -903,7 +890,7 @@ void AFLBinaryTreeDictionary::dict_census_update(size_t size, bool split, bool b
 }
 #endif // INCLUDE_ALL_GCS
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 bool BinaryTreeDictionary<Chunk_t, FreeList_t>::coal_dict_over_populated(size_t size) {
 // For the general type of freelists, encourage coalescing by
 // returning true.
@@ -915,7 +902,7 @@ template <>
 bool AFLBinaryTreeDictionary::coal_dict_over_populated(size_t size) {
 if (FLSAlwaysCoalesceLarge) return true;
-TreeList<FreeChunk, AdaptiveFreeList>* list_of_size = find_list(size);
+TreeList<FreeChunk, AdaptiveFreeList<FreeChunk> >* list_of_size = find_list(size);
 // None of requested size implies overpopulated.
 return list_of_size == NULL || list_of_size->coal_desired() <= 0 ||
 list_of_size->count() > list_of_size->coal_desired();
@@ -928,15 +915,15 @@ bool AFLBinaryTreeDictionary::coal_dict_over_populated(size_t size) {
 // do_tree() walks the nodes in the binary tree applying do_list()
 // to each list at each node.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class TreeCensusClosure : public StackObj {
 protected:
-virtual void do_list(FreeList_t<Chunk_t>* fl) = 0;
+virtual void do_list(FreeList_t* fl) = 0;
 public:
 virtual void do_tree(TreeList<Chunk_t, FreeList_t>* tl) = 0;
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class AscendTreeCensusClosure : public TreeCensusClosure<Chunk_t, FreeList_t> {
 public:
 void do_tree(TreeList<Chunk_t, FreeList_t>* tl) {
@@ -948,7 +935,7 @@ class AscendTreeCensusClosure : public TreeCensusClosure<Chunk_t, FreeList_t> {
 }
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class DescendTreeCensusClosure : public TreeCensusClosure<Chunk_t, FreeList_t> {
 public:
 void do_tree(TreeList<Chunk_t, FreeList_t>* tl) {
@@ -962,7 +949,7 @@ class DescendTreeCensusClosure : public TreeCensusClosure<Chunk_t, FreeList_t> {
 // For each list in the tree, calculate the desired, desired
 // coalesce, count before sweep, and surplus before sweep.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class BeginSweepClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
 double _percentage;
 float _inter_sweep_current;
@@ -995,16 +982,16 @@ class BeginSweepClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
 // Similar to TreeCensusClosure but searches the
 // tree and returns promptly when found.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class TreeSearchClosure : public StackObj {
 protected:
-virtual bool do_list(FreeList_t<Chunk_t>* fl) = 0;
+virtual bool do_list(FreeList_t* fl) = 0;
 public:
 virtual bool do_tree(TreeList<Chunk_t, FreeList_t>* tl) = 0;
 };
 #if 0 // Don't need this yet but here for symmetry.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class AscendTreeSearchClosure : public TreeSearchClosure<Chunk_t> {
 public:
 bool do_tree(TreeList<Chunk_t, FreeList_t>* tl) {
@@ -1018,7 +1005,7 @@ class AscendTreeSearchClosure : public TreeSearchClosure<Chunk_t> {
 };
 #endif
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class DescendTreeSearchClosure : public TreeSearchClosure<Chunk_t, FreeList_t> {
 public:
 bool do_tree(TreeList<Chunk_t, FreeList_t>* tl) {
@@ -1033,14 +1020,14 @@ class DescendTreeSearchClosure : public TreeSearchClosure<Chunk_t, FreeList_t> {
 // Searches the tree for a chunk that ends at the
 // specified address.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class EndTreeSearchClosure : public DescendTreeSearchClosure<Chunk_t, FreeList_t> {
 HeapWord* _target;
 Chunk_t* _found;
 public:
 EndTreeSearchClosure(HeapWord* target) : _target(target), _found(NULL) {}
-bool do_list(FreeList_t<Chunk_t>* fl) {
+bool do_list(FreeList_t* fl) {
 Chunk_t* item = fl->head();
 while (item != NULL) {
 if (item->end() == (uintptr_t*) _target) {
@@ -1054,7 +1041,7 @@ class EndTreeSearchClosure : public DescendTreeSearchClosure<Chunk_t, FreeList_t
 Chunk_t* found() { return _found; }
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 Chunk_t* BinaryTreeDictionary<Chunk_t, FreeList_t>::find_chunk_ends_at(HeapWord* target) const {
 EndTreeSearchClosure<Chunk_t, FreeList_t> etsc(target);
 bool found_target = etsc.do_tree(root());
@@ -1063,7 +1050,7 @@ Chunk_t* BinaryTreeDictionary<Chunk_t, FreeList_t>::find_chunk_ends_at(HeapWord*
 return etsc.found();
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::begin_sweep_dict_census(double coalSurplusPercent,
 float inter_sweep_current, float inter_sweep_estimate, float intra_sweep_estimate) {
 BeginSweepClosure<Chunk_t, FreeList_t> bsc(coalSurplusPercent, inter_sweep_current,
@@ -1075,32 +1062,32 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::begin_sweep_dict_census(double c
 // Closures and methods for calculating total bytes returned to the
 // free lists in the tree.
 #ifndef PRODUCT
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class InitializeDictReturnedBytesClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
 public:
-void do_list(FreeList_t<Chunk_t>* fl) {
+void do_list(FreeList_t* fl) {
 fl->set_returned_bytes(0);
 }
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::initialize_dict_returned_bytes() {
 InitializeDictReturnedBytesClosure<Chunk_t, FreeList_t> idrb;
 idrb.do_tree(root());
 }
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class ReturnedBytesClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
 size_t _dict_returned_bytes;
 public:
 ReturnedBytesClosure() { _dict_returned_bytes = 0; }
-void do_list(FreeList_t<Chunk_t>* fl) {
+void do_list(FreeList_t* fl) {
 _dict_returned_bytes += fl->returned_bytes();
 }
 size_t dict_returned_bytes() { return _dict_returned_bytes; }
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::sum_dict_returned_bytes() {
 ReturnedBytesClosure<Chunk_t, FreeList_t> rbc;
 rbc.do_tree(root());
@@ -1109,17 +1096,17 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::sum_dict_returned_bytes() {
 }
 // Count the number of entries in the tree.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class treeCountClosure : public DescendTreeCensusClosure<Chunk_t, FreeList_t> {
 public:
 uint count;
 treeCountClosure(uint c) { count = c; }
-void do_list(FreeList_t<Chunk_t>* fl) {
+void do_list(FreeList_t* fl) {
 count++;
 }
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_count() {
 treeCountClosure<Chunk_t, FreeList_t> ctc(0);
 ctc.do_tree(root());
@@ -1128,7 +1115,7 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_count() {
 #endif // PRODUCT
 // Calculate surpluses for the lists in the tree.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class setTreeSurplusClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
 double percentage;
 public:
@@ -1144,14 +1131,14 @@ class setTreeSurplusClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t
 #endif // INCLUDE_ALL_GCS
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::set_tree_surplus(double splitSurplusPercent) {
 setTreeSurplusClosure<Chunk_t, FreeList_t> sts(splitSurplusPercent);
 sts.do_tree(root());
 }
 // Set hints for the lists in the tree.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class setTreeHintsClosure : public DescendTreeCensusClosure<Chunk_t, FreeList_t> {
 size_t hint;
 public:
@@ -1170,14 +1157,14 @@ class setTreeHintsClosure : public DescendTreeCensusClosure<Chunk_t, FreeList_t>
 #endif // INCLUDE_ALL_GCS
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::set_tree_hints(void) {
 setTreeHintsClosure<Chunk_t, FreeList_t> sth(0);
 sth.do_tree(root());
 }
 // Save count before previous sweep and splits and coalesces.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class clearTreeCensusClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
 void do_list(FreeList<Chunk_t>* fl) {}
@@ -1192,14 +1179,14 @@ class clearTreeCensusClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_
 #endif // INCLUDE_ALL_GCS
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::clear_tree_census(void) {
 clearTreeCensusClosure<Chunk_t, FreeList_t> ctc;
 ctc.do_tree(root());
 }
 // Do reporting and post sweep clean up.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::end_sweep_dict_census(double splitSurplusPercent) {
 // Does walking the tree 3 times hurt?
 set_tree_surplus(splitSurplusPercent);
@@ -1211,7 +1198,7 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::end_sweep_dict_census(double spl
 }
 // Print summary statistics
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::report_statistics() const {
 FreeBlockDictionary<Chunk_t>::verify_par_locked();
 gclog_or_tty->print("Statistics for BinaryTreeDictionary:\n"
@@ -1230,22 +1217,22 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::report_statistics() const {
 // Print census information - counts, births, deaths, etc.
 // for each list in the tree. Also print some summary
 // information.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class PrintTreeCensusClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
 int _print_line;
 size_t _total_free;
-FreeList_t<Chunk_t> _total;
+FreeList_t _total;
 public:
 PrintTreeCensusClosure() {
 _print_line = 0;
 _total_free = 0;
 }
-FreeList_t<Chunk_t>* total() { return &_total; }
+FreeList_t* total() { return &_total; }
 size_t total_free() { return _total_free; }
 void do_list(FreeList<Chunk_t>* fl) {
 if (++_print_line >= 40) {
-FreeList_t<Chunk_t>::print_labels_on(gclog_or_tty, "size");
+FreeList_t::print_labels_on(gclog_or_tty, "size");
 _print_line = 0;
 }
 fl->print_on(gclog_or_tty);
@@ -1256,7 +1243,7 @@ class PrintTreeCensusClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_
 #if INCLUDE_ALL_GCS
 void do_list(AdaptiveFreeList<Chunk_t>* fl) {
 if (++_print_line >= 40) {
-FreeList_t<Chunk_t>::print_labels_on(gclog_or_tty, "size");
+FreeList_t::print_labels_on(gclog_or_tty, "size");
 _print_line = 0;
 }
 fl->print_on(gclog_or_tty);
@@ -1275,16 +1262,16 @@ class PrintTreeCensusClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_
 #endif // INCLUDE_ALL_GCS
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 void BinaryTreeDictionary<Chunk_t, FreeList_t>::print_dict_census(void) const {
 gclog_or_tty->print("\nBinaryTree\n");
-FreeList_t<Chunk_t>::print_labels_on(gclog_or_tty, "size");
+FreeList_t::print_labels_on(gclog_or_tty, "size");
 PrintTreeCensusClosure<Chunk_t, FreeList_t> ptc;
 ptc.do_tree(root());
-FreeList_t<Chunk_t>* total = ptc.total();
-FreeList_t<Chunk_t>::print_labels_on(gclog_or_tty, " ");
+FreeList_t* total = ptc.total();
+FreeList_t::print_labels_on(gclog_or_tty, " ");
 }
 #if INCLUDE_ALL_GCS
@@ -1293,7 +1280,7 @@ void AFLBinaryTreeDictionary::print_dict_census(void) const {
 gclog_or_tty->print("\nBinaryTree\n");
 AdaptiveFreeList<FreeChunk>::print_labels_on(gclog_or_tty, "size");
-PrintTreeCensusClosure<FreeChunk, AdaptiveFreeList> ptc;
+PrintTreeCensusClosure<FreeChunk, AdaptiveFreeList<FreeChunk> > ptc;
 ptc.do_tree(root());
 AdaptiveFreeList<FreeChunk>* total = ptc.total();
@ -1311,7 +1298,7 @@ void AFLBinaryTreeDictionary::print_dict_census(void) const {
} }
#endif // INCLUDE_ALL_GCS #endif // INCLUDE_ALL_GCS
template <class Chunk_t, template <class> class FreeList_t> template <class Chunk_t, class FreeList_t>
class PrintFreeListsClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> { class PrintFreeListsClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t> {
outputStream* _st; outputStream* _st;
int _print_line; int _print_line;
@ -1321,9 +1308,9 @@ class PrintFreeListsClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t
_st = st; _st = st;
_print_line = 0; _print_line = 0;
} }
void do_list(FreeList_t<Chunk_t>* fl) { void do_list(FreeList_t* fl) {
if (++_print_line >= 40) { if (++_print_line >= 40) {
FreeList_t<Chunk_t>::print_labels_on(_st, "size"); FreeList_t::print_labels_on(_st, "size");
_print_line = 0; _print_line = 0;
} }
fl->print_on(gclog_or_tty); fl->print_on(gclog_or_tty);
@ -1337,10 +1324,10 @@ class PrintFreeListsClosure : public AscendTreeCensusClosure<Chunk_t, FreeList_t
} }
}; };
template <class Chunk_t, template <class> class FreeList_t> template <class Chunk_t, class FreeList_t>
void BinaryTreeDictionary<Chunk_t, FreeList_t>::print_free_lists(outputStream* st) const { void BinaryTreeDictionary<Chunk_t, FreeList_t>::print_free_lists(outputStream* st) const {
FreeList_t<Chunk_t>::print_labels_on(st, "size"); FreeList_t::print_labels_on(st, "size");
PrintFreeListsClosure<Chunk_t, FreeList_t> pflc(st); PrintFreeListsClosure<Chunk_t, FreeList_t> pflc(st);
pflc.do_tree(root()); pflc.do_tree(root());
} }
@ -1349,7 +1336,7 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::print_free_lists(outputStream* s
// . _root has no parent // . _root has no parent
// . parent and child point to each other // . parent and child point to each other
// . each node's key correctly related to that of its child(ren) // . each node's key correctly related to that of its child(ren)
template <class Chunk_t, template <class> class FreeList_t> template <class Chunk_t, class FreeList_t>
void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_tree() const { void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_tree() const {
guarantee(root() == NULL || total_free_blocks() == 0 || guarantee(root() == NULL || total_free_blocks() == 0 ||
total_size() != 0, "_total_size shouldn't be 0?"); total_size() != 0, "_total_size shouldn't be 0?");
@ -1357,7 +1344,7 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_tree() const {
verify_tree_helper(root()); verify_tree_helper(root());
} }
template <class Chunk_t, template <class> class FreeList_t> template <class Chunk_t, class FreeList_t>
size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_prev_free_ptrs(TreeList<Chunk_t, FreeList_t>* tl) { size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_prev_free_ptrs(TreeList<Chunk_t, FreeList_t>* tl) {
size_t ct = 0; size_t ct = 0;
for (Chunk_t* curFC = tl->head(); curFC != NULL; curFC = curFC->next()) { for (Chunk_t* curFC = tl->head(); curFC != NULL; curFC = curFC->next()) {
@ -1371,7 +1358,7 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_prev_free_ptrs(TreeList
// Note: this helper is recursive rather than iterative, so use with // Note: this helper is recursive rather than iterative, so use with
// caution on very deep trees; and watch out for stack overflow errors; // caution on very deep trees; and watch out for stack overflow errors;
// In general, to be used only for debugging. // In general, to be used only for debugging.
template <class Chunk_t, template <class> class FreeList_t> template <class Chunk_t, class FreeList_t>
void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_tree_helper(TreeList<Chunk_t, FreeList_t>* tl) const { void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_tree_helper(TreeList<Chunk_t, FreeList_t>* tl) const {
if (tl == NULL) if (tl == NULL)
return; return;
@ -1400,25 +1387,25 @@ void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify_tree_helper(TreeList<Chun
verify_tree_helper(tl->right()); verify_tree_helper(tl->right());
} }
template <class Chunk_t, template <class> class FreeList_t> template <class Chunk_t, class FreeList_t>
void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify() const { void BinaryTreeDictionary<Chunk_t, FreeList_t>::verify() const {
verify_tree(); verify_tree();
guarantee(total_size() == total_size_in_tree(root()), "Total Size inconsistency"); guarantee(total_size() == total_size_in_tree(root()), "Total Size inconsistency");
} }
template class TreeList<Metablock, FreeList>; template class TreeList<Metablock, FreeList<Metablock> >;
template class BinaryTreeDictionary<Metablock, FreeList>; template class BinaryTreeDictionary<Metablock, FreeList<Metablock> >;
template class TreeChunk<Metablock, FreeList>; template class TreeChunk<Metablock, FreeList<Metablock> >;
template class TreeList<Metachunk, FreeList>; template class TreeList<Metachunk, FreeList<Metachunk> >;
template class BinaryTreeDictionary<Metachunk, FreeList>; template class BinaryTreeDictionary<Metachunk, FreeList<Metachunk> >;
template class TreeChunk<Metachunk, FreeList>; template class TreeChunk<Metachunk, FreeList<Metachunk> >;
#if INCLUDE_ALL_GCS #if INCLUDE_ALL_GCS
// Explicitly instantiate these types for FreeChunk. // Explicitly instantiate these types for FreeChunk.
template class TreeList<FreeChunk, AdaptiveFreeList>; template class TreeList<FreeChunk, AdaptiveFreeList<FreeChunk> >;
template class BinaryTreeDictionary<FreeChunk, AdaptiveFreeList>; template class BinaryTreeDictionary<FreeChunk, AdaptiveFreeList<FreeChunk> >;
template class TreeChunk<FreeChunk, AdaptiveFreeList>; template class TreeChunk<FreeChunk, AdaptiveFreeList<FreeChunk> >;
#endif // INCLUDE_ALL_GCS #endif // INCLUDE_ALL_GCS
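The template change that runs through this file swaps a template-template parameter for an ordinary type parameter: the dictionary no longer applies FreeList_t to Chunk_t itself, callers now pass an already-instantiated list type such as AdaptiveFreeList<FreeChunk>. A standalone sketch of the two shapes (illustrative names, not HotSpot code):

#include <cstddef>

template <class Chunk> class AdaptiveList { public: size_t count() const { return 0; } };

// Old shape: the second parameter is itself a template and is applied inside.
template <class Chunk_t, template <class> class FreeList_t>
class OldStyleDictionary {
  FreeList_t<Chunk_t> _total;   // instantiated here
 public:
  size_t total_count() const { return _total.count(); }
};

// New shape: the second parameter is already a complete type.
template <class Chunk_t, class FreeList_t>
class NewStyleDictionary {
  FreeList_t _total;            // used as-is
 public:
  size_t total_count() const { return _total.count(); }
};

struct Chunk { size_t size; };

int main() {
  OldStyleDictionary<Chunk, AdaptiveList>         a;  // pass the template
  NewStyleDictionary<Chunk, AdaptiveList<Chunk> > b;  // pass the instantiation
  return static_cast<int>(a.total_count() + b.total_count());
}

The space in AdaptiveFreeList<FreeChunk> > is kept because pre-C++11 compilers parse >> at the end of a template argument list as a right-shift token.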


@@ -37,18 +37,18 @@
 // A TreeList is a FreeList which can be used to maintain a
 // binary tree of free lists.
-template <class Chunk_t, template <class> class FreeList_t> class TreeChunk;
-template <class Chunk_t, template <class> class FreeList_t> class BinaryTreeDictionary;
-template <class Chunk_t, template <class> class FreeList_t> class AscendTreeCensusClosure;
-template <class Chunk_t, template <class> class FreeList_t> class DescendTreeCensusClosure;
-template <class Chunk_t, template <class> class FreeList_t> class DescendTreeSearchClosure;
+template <class Chunk_t, class FreeList_t> class TreeChunk;
+template <class Chunk_t, class FreeList_t> class BinaryTreeDictionary;
+template <class Chunk_t, class FreeList_t> class AscendTreeCensusClosure;
+template <class Chunk_t, class FreeList_t> class DescendTreeCensusClosure;
+template <class Chunk_t, class FreeList_t> class DescendTreeSearchClosure;
 class FreeChunk;
 template <class> class AdaptiveFreeList;
-typedef BinaryTreeDictionary<FreeChunk, AdaptiveFreeList> AFLBinaryTreeDictionary;
+typedef BinaryTreeDictionary<FreeChunk, AdaptiveFreeList<FreeChunk> > AFLBinaryTreeDictionary;
-template <class Chunk_t, template <class> class FreeList_t>
-class TreeList : public FreeList_t<Chunk_t> {
+template <class Chunk_t, class FreeList_t>
+class TreeList : public FreeList_t {
 friend class TreeChunk<Chunk_t, FreeList_t>;
 friend class BinaryTreeDictionary<Chunk_t, FreeList_t>;
 friend class AscendTreeCensusClosure<Chunk_t, FreeList_t>;
@@ -66,12 +66,12 @@ class TreeList : public FreeList_t<Chunk_t> {
 TreeList<Chunk_t, FreeList_t>* right() const { return _right; }
 // Wrapper on call to base class, to get the template to compile.
-Chunk_t* head() const { return FreeList_t<Chunk_t>::head(); }
-Chunk_t* tail() const { return FreeList_t<Chunk_t>::tail(); }
-void set_head(Chunk_t* head) { FreeList_t<Chunk_t>::set_head(head); }
-void set_tail(Chunk_t* tail) { FreeList_t<Chunk_t>::set_tail(tail); }
-size_t size() const { return FreeList_t<Chunk_t>::size(); }
+Chunk_t* head() const { return FreeList_t::head(); }
+Chunk_t* tail() const { return FreeList_t::tail(); }
+void set_head(Chunk_t* head) { FreeList_t::set_head(head); }
+void set_tail(Chunk_t* tail) { FreeList_t::set_tail(tail); }
+size_t size() const { return FreeList_t::size(); }
 // Accessors for links in tree.
@@ -90,7 +90,7 @@ class TreeList : public FreeList_t<Chunk_t> {
 void clear_left() { _left = NULL; }
 void clear_right() { _right = NULL; }
 void clear_parent() { _parent = NULL; }
-void initialize() { clear_left(); clear_right(), clear_parent(); FreeList_t<Chunk_t>::initialize(); }
+void initialize() { clear_left(); clear_right(), clear_parent(); FreeList_t::initialize(); }
 // For constructing a TreeList from a Tree chunk or
 // address and size.
@@ -139,7 +139,7 @@ class TreeList : public FreeList_t<Chunk_t> {
 // on the free list for a node in the tree and is only removed if
 // it is the last chunk on the free list.
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class TreeChunk : public Chunk_t {
 friend class TreeList<Chunk_t, FreeList_t>;
 TreeList<Chunk_t, FreeList_t>* _list;
@@ -173,7 +173,7 @@ class TreeChunk : public Chunk_t {
 };
-template <class Chunk_t, template <class> class FreeList_t>
+template <class Chunk_t, class FreeList_t>
 class BinaryTreeDictionary: public FreeBlockDictionary<Chunk_t> {
 friend class VMStructs;
 size_t _total_size;
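The "Wrapper on call to base class, to get the template to compile" comment refers to dependent name lookup: inside a class template, members of a base class that depends on a template parameter are not found by plain unqualified lookup, which is why the wrappers spell out FreeList_t::head() and friends. A minimal sketch of the rule (illustrative types, not the HotSpot classes):

#include <cstddef>

template <class T>
class ListBase {
 public:
  ListBase() : _head(NULL) {}
  T* head() const { return _head; }
 private:
  T* _head;
};

// FreeList_t is expected to be something like ListBase<Chunk_t>.
template <class Chunk_t, class FreeList_t>
class TreeListSketch : public FreeList_t {
 public:
  // Chunk_t* first() const { return head(); }  // does not compile: 'head' lives
  //                                             // in a dependent base class
  Chunk_t* first() const { return FreeList_t::head(); }  // qualified call is found
  Chunk_t* second() const { return this->head(); }       // 'this->' works as well
};

int main() {
  TreeListSketch<int, ListBase<int> > tl;
  return (tl.first() == NULL && tl.second() == NULL) ? 0 : 1;
}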


@@ -626,7 +626,6 @@ void DefNewGeneration::collect(bool full,
 true, // activate StrongRootsScope
 SharedHeap::ScanningOption(so),
 &fsc_with_no_gc_barrier,
-true, // walk *all* scavengable nmethods
 &fsc_with_gc_barrier,
 &klass_scan_closure);


@@ -594,20 +594,12 @@ gen_process_strong_roots(int level,
 bool activate_scope,
 SharedHeap::ScanningOption so,
 OopsInGenClosure* not_older_gens,
-bool do_code_roots,
 OopsInGenClosure* older_gens,
 KlassClosure* klass_closure) {
 // General strong roots.
-if (!do_code_roots) {
-SharedHeap::process_strong_roots(activate_scope, so,
-not_older_gens, NULL, klass_closure);
-} else {
-bool do_code_marking = (activate_scope || nmethod::oops_do_marking_is_active());
-CodeBlobToOopClosure code_roots(not_older_gens, /*do_marking=*/ do_code_marking);
-SharedHeap::process_strong_roots(activate_scope, so,
-not_older_gens, &code_roots, klass_closure);
-}
+SharedHeap::process_strong_roots(activate_scope, so,
+not_older_gens, klass_closure);
 if (younger_gens_as_roots) {
 if (!_gen_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
@@ -629,9 +621,8 @@ gen_process_strong_roots(int level,
 _gen_process_strong_tasks->all_tasks_completed();
 }
-void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure,
-CodeBlobClosure* code_roots) {
-SharedHeap::process_weak_roots(root_closure, code_roots);
+void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
+SharedHeap::process_weak_roots(root_closure);
 // "Local" "weak" refs
 for (int i = 0; i < _n_gens; i++) {
 _gens[i]->ref_processor()->weak_oops_do(root_closure);


@@ -414,15 +414,13 @@ public:
 bool activate_scope,
 SharedHeap::ScanningOption so,
 OopsInGenClosure* not_older_gens,
-bool do_code_roots,
 OopsInGenClosure* older_gens,
 KlassClosure* klass_closure);
-// Apply "blk" to all the weak roots of the system. These include
-// JNI weak roots, the code cache, system dictionary, symbol table,
-// string table, and referents of reachable weak refs.
-void gen_process_weak_roots(OopClosure* root_closure,
-CodeBlobClosure* code_roots);
+// Apply "root_closure" to all the weak roots of the system.
+// These include JNI weak roots, string table,
+// and referents of reachable weak refs.
+void gen_process_weak_roots(OopClosure* root_closure);
 // Set the saved marks of generations, if that makes sense.
 // In particular, if any generation might iterate over the oops


@@ -212,7 +212,6 @@ void GenMarkSweep::mark_sweep_phase1(int level,
 true, // activate StrongRootsScope
 SharedHeap::SO_SystemClasses,
 &follow_root_closure,
-true, // walk code active on stacks
 &follow_root_closure,
 &follow_klass_closure);
@@ -295,18 +294,12 @@ void GenMarkSweep::mark_sweep_phase3(int level) {
 gch->gen_process_strong_roots(level,
 false, // Younger gens are not roots.
 true, // activate StrongRootsScope
-SharedHeap::SO_AllClasses,
+SharedHeap::SO_AllClasses | SharedHeap::SO_Strings | SharedHeap::SO_AllCodeCache,
 &adjust_pointer_closure,
-false, // do not walk code
 &adjust_pointer_closure,
 &adjust_klass_closure);
-// Now adjust pointers in remaining weak roots. (All of which should
-// have been cleared if they pointed to non-surviving objects.)
-CodeBlobToOopClosure adjust_code_pointer_closure(&adjust_pointer_closure,
-/*do_marking=*/ false);
-gch->gen_process_weak_roots(&adjust_pointer_closure,
-&adjust_code_pointer_closure);
+gch->gen_process_weak_roots(&adjust_pointer_closure);
 adjust_marks();
 GenAdjustPointersClosure blk;


@@ -46,8 +46,8 @@
 #include "utilities/copy.hpp"
 #include "utilities/debug.hpp"
-typedef BinaryTreeDictionary<Metablock, FreeList> BlockTreeDictionary;
-typedef BinaryTreeDictionary<Metachunk, FreeList> ChunkTreeDictionary;
+typedef BinaryTreeDictionary<Metablock, FreeList<Metablock> > BlockTreeDictionary;
+typedef BinaryTreeDictionary<Metachunk, FreeList<Metachunk> > ChunkTreeDictionary;
 // Set this constant to enable slow integrity checking of the free chunk lists
 const bool metaspace_slow_verify = false;
@@ -790,7 +790,7 @@ MetaWord* BlockFreelist::get_block(size_t word_size) {
 return NULL;
 }
-if (word_size < TreeChunk<Metablock, FreeList>::min_size()) {
+if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
 // Dark matter. Too small for dictionary.
 return NULL;
 }
@@ -810,7 +810,7 @@ MetaWord* BlockFreelist::get_block(size_t word_size) {
 MetaWord* new_block = (MetaWord*)free_block;
 assert(block_size >= word_size, "Incorrect size of block from freelist");
 const size_t unused = block_size - word_size;
-if (unused >= TreeChunk<Metablock, FreeList>::min_size()) {
+if (unused >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
 return_block(new_block + word_size, unused);
 }
@@ -2240,7 +2240,7 @@ ChunkIndex ChunkManager::list_index(size_t size) {
 void SpaceManager::deallocate(MetaWord* p, size_t word_size) {
 assert_lock_strong(_lock);
 size_t raw_word_size = get_raw_word_size(word_size);
-size_t min_size = TreeChunk<Metablock, FreeList>::min_size();
+size_t min_size = TreeChunk<Metablock, FreeList<Metablock> >::min_size();
 assert(raw_word_size >= min_size,
 err_msg("Should not deallocate dark matter " SIZE_FORMAT "<" SIZE_FORMAT, word_size, min_size));
 block_freelists()->return_block(p, raw_word_size);
@@ -2296,7 +2296,7 @@ void SpaceManager::add_chunk(Metachunk* new_chunk, bool make_current) {
 void SpaceManager::retire_current_chunk() {
 if (current_chunk() != NULL) {
 size_t remaining_words = current_chunk()->free_word_size();
-if (remaining_words >= TreeChunk<Metablock, FreeList>::min_size()) {
+if (remaining_words >= TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
 block_freelists()->return_block(current_chunk()->allocate(remaining_words), remaining_words);
 inc_used_metrics(remaining_words);
 }
@@ -3279,7 +3279,7 @@ void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
 assert(Thread::current()->is_VM_thread(), "should be the VM thread");
 // Don't take Heap_lock
 MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
-if (word_size < TreeChunk<Metablock, FreeList>::min_size()) {
+if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
 // Dark matter. Too small for dictionary.
 #ifdef ASSERT
 Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
@@ -3294,7 +3294,7 @@ void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
 } else {
 MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
-if (word_size < TreeChunk<Metablock, FreeList>::min_size()) {
+if (word_size < TreeChunk<Metablock, FreeList<Metablock> >::min_size()) {
 // Dark matter. Too small for dictionary.
 #ifdef ASSERT
 Copy::fill_to_words((HeapWord*)ptr, word_size, 0xf5f5f5f5);
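All of the TreeChunk<Metablock, FreeList<Metablock> >::min_size() comparisons above are the same guard: a block smaller than the minimum tree chunk ("dark matter") cannot be put back on the free-block dictionary and is simply abandoned. A hedged sketch of that gate, using an illustrative constant rather than the real minimum:

#include <cassert>
#include <cstddef>

// Stand-in for TreeChunk<Metablock, FreeList<Metablock> >::min_size().
static const size_t kMinDictionaryWords = 4;

// Only blocks at least this large are worth tracking on a free list;
// smaller remainders are left as untracked waste inside the chunk.
static bool returnable_to_freelist(size_t word_size) {
  return word_size >= kMinDictionaryWords;
}

int main() {
  assert(!returnable_to_freelist(2));   // dark matter: too small to track
  assert(returnable_to_freelist(64));   // normal block: goes back on a free list
  return 0;
}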


@@ -139,7 +139,6 @@ SharedHeap::StrongRootsScope::~StrongRootsScope() {
 void SharedHeap::process_strong_roots(bool activate_scope,
 ScanningOption so,
 OopClosure* roots,
-CodeBlobClosure* code_roots,
 KlassClosure* klass_closure) {
 StrongRootsScope srs(this, activate_scope);
@@ -156,15 +155,17 @@ void SharedHeap::process_strong_roots(bool activate_scope,
 if (!_process_strong_tasks->is_task_claimed(SH_PS_JNIHandles_oops_do))
 JNIHandles::oops_do(roots);
+CodeBlobToOopClosure code_roots(roots, true);
 CLDToOopClosure roots_from_clds(roots);
 // If we limit class scanning to SO_SystemClasses we need to apply a CLD closure to
 // CLDs which are strongly reachable from the thread stacks.
 CLDToOopClosure* roots_from_clds_p = ((so & SO_SystemClasses) ? &roots_from_clds : NULL);
 // All threads execute this; the individual threads are task groups.
 if (CollectedHeap::use_parallel_gc_threads()) {
-Threads::possibly_parallel_oops_do(roots, roots_from_clds_p, code_roots);
+Threads::possibly_parallel_oops_do(roots, roots_from_clds_p, &code_roots);
 } else {
-Threads::oops_do(roots, roots_from_clds_p, code_roots);
+Threads::oops_do(roots, roots_from_clds_p, &code_roots);
 }
 if (!_process_strong_tasks-> is_task_claimed(SH_PS_ObjectSynchronizer_oops_do))
@@ -206,17 +207,17 @@ void SharedHeap::process_strong_roots(bool activate_scope,
 if (!_process_strong_tasks->is_task_claimed(SH_PS_CodeCache_oops_do)) {
 if (so & SO_ScavengeCodeCache) {
-assert(code_roots != NULL, "must supply closure for code cache");
+assert(&code_roots != NULL, "must supply closure for code cache");
 // We only visit parts of the CodeCache when scavenging.
-CodeCache::scavenge_root_nmethods_do(code_roots);
+CodeCache::scavenge_root_nmethods_do(&code_roots);
 }
 if (so & SO_AllCodeCache) {
-assert(code_roots != NULL, "must supply closure for code cache");
+assert(&code_roots != NULL, "must supply closure for code cache");
 // CMSCollector uses this to do intermediate-strength collections.
 // We scan the entire code cache, since CodeCache::do_unloading is not called.
-CodeCache::blobs_do(code_roots);
+CodeCache::blobs_do(&code_roots);
 }
 // Verify that the code cache contents are not subject to
 // movement by a scavenging collection.
@@ -233,13 +234,9 @@ public:
 };
 static AlwaysTrueClosure always_true;
-void SharedHeap::process_weak_roots(OopClosure* root_closure,
-CodeBlobClosure* code_roots) {
+void SharedHeap::process_weak_roots(OopClosure* root_closure) {
 // Global (weak) JNI handles
 JNIHandles::weak_oops_do(&always_true, root_closure);
-CodeCache::blobs_do(code_roots);
-StringTable::oops_do(root_closure);
 }
 void SharedHeap::set_barrier_set(BarrierSet* bs) {
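The sharedHeap.cpp change moves construction of the code-roots closure into process_strong_roots itself: instead of threading a possibly-NULL CodeBlobClosure* through every caller, the method now builds a CodeBlobToOopClosure on its own stack and passes its address. A generic sketch of that refactoring pattern (simplified types, not the HotSpot closure hierarchy):

#include <cstdio>

struct OopClosureSketch {
  virtual void do_oop(void** p) { (void)p; }
  virtual ~OopClosureSketch() {}
};

// Plays the role of CodeBlobToOopClosure: adapts an oop closure to code blobs.
struct BlobClosureSketch {
  explicit BlobClosureSketch(OopClosureSketch* cl) : _cl(cl) {}
  void do_blob() { void* dummy = 0; _cl->do_oop(&dummy); }
  OopClosureSketch* _cl;
};

// After the change: the adapter is a local, so its address is never NULL and
// callers no longer have to pass (or remember to pass) a code-roots argument.
static void process_strong_roots_sketch(OopClosureSketch* roots) {
  BlobClosureSketch code_roots(roots);  // built here instead of by each caller
  code_roots.do_blob();
}

int main() {
  OopClosureSketch roots;
  process_strong_roots_sketch(&roots);
  std::printf("strong roots processed\n");
  return 0;
}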


@@ -238,14 +238,10 @@ public:
 void process_strong_roots(bool activate_scope,
 ScanningOption so,
 OopClosure* roots,
-CodeBlobClosure* code_roots,
 KlassClosure* klass_closure);
-// Apply "blk" to all the weak roots of the system. These include
-// JNI weak roots, the code cache, system dictionary, symbol table,
-// string table.
-void process_weak_roots(OopClosure* root_closure,
-CodeBlobClosure* code_roots);
+// Apply "root_closure" to the JNI weak roots..
+void process_weak_roots(OopClosure* root_closure);
 // The functions below are helper functions that a subclass of
 // "SharedHeap" can use in the implementation of its virtual
@@ -275,4 +271,8 @@ public:
 size_t capacity);
 };
+inline SharedHeap::ScanningOption operator|(SharedHeap::ScanningOption so0, SharedHeap::ScanningOption so1) {
+return static_cast<SharedHeap::ScanningOption>(static_cast<int>(so0) | static_cast<int>(so1));
+}
 #endif // SHARE_VM_MEMORY_SHAREDHEAP_HPP
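The new inline operator| exists because OR-ing two enumerators of ScanningOption yields an int, which does not implicitly convert back to the enum; the overload is what lets call sites such as the mark_sweep_phase3 hunk earlier write SO_AllClasses | SO_Strings | SO_AllCodeCache as a single argument. A self-contained sketch of the same idiom with a hypothetical flag enum:

enum ScanFlags {
  ScanNone      = 0x0,
  ScanClasses   = 0x1,
  ScanStrings   = 0x2,
  ScanCodeCache = 0x4
};

inline ScanFlags operator|(ScanFlags a, ScanFlags b) {
  // Without the overload, (ScanClasses | ScanStrings) is an int and every
  // call site expecting ScanFlags would need an explicit cast.
  return static_cast<ScanFlags>(static_cast<int>(a) | static_cast<int>(b));
}

static int count_selected(ScanFlags flags) {
  int selected = 0;
  if (flags & ScanClasses)   selected += 1;
  if (flags & ScanStrings)   selected += 1;
  if (flags & ScanCodeCache) selected += 1;
  return selected;
}

int main() {
  // Mirrors SO_AllClasses | SO_Strings | SO_AllCodeCache in mark_sweep_phase3.
  return count_selected(ScanClasses | ScanStrings | ScanCodeCache) == 3 ? 0 : 1;
}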


@@ -242,7 +242,7 @@ typedef TwoOopHashtable<Klass*, mtClass> KlassTwoOopHashtable;
 typedef Hashtable<Klass*, mtClass> KlassHashtable;
 typedef HashtableEntry<Klass*, mtClass> KlassHashtableEntry;
 typedef TwoOopHashtable<Symbol*, mtClass> SymbolTwoOopHashtable;
-typedef BinaryTreeDictionary<Metablock, FreeList> MetablockTreeDictionary;
+typedef BinaryTreeDictionary<Metablock, FreeList<Metablock> > MetablockTreeDictionary;
 //--------------------------------------------------------------------------------
 // VM_STRUCTS