8301217: Remove FilteringClosure
Reviewed-by: iwalulya, tschatzl
commit f50cda7d45
parent 08b24ac7aa
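Context for the change, based only on the removed code shown in the hunks below: FilteringClosure wrapped another OopIterateClosure and forwarded an oop only when it pointed below a generation boundary; this commit drops both the gen_boundary/_boundary plumbing and the wrapper that consumed it. The following is a minimal, self-contained sketch of that filter-and-delegate pattern, not HotSpot code — the Closure/PrintClosure types, HeapWord alias, and addresses are hypothetical stand-ins for illustration only.

// Simplified sketch of the removed boundary-filtering wrapper (stand-in types).
#include <cstdint>
#include <iostream>
#include <vector>

using HeapWord = uintptr_t;   // stand-in for a heap address

// Minimal stand-in for an oop-visiting closure: visits one reference at a time.
struct Closure {
  virtual ~Closure() = default;
  virtual void do_oop(HeapWord addr) = 0;
};

// Mirrors the removed wrapper: forwards only non-null references that point
// below a fixed boundary, and silently drops everything at or above it.
class FilteringClosure : public Closure {
  HeapWord _boundary;
  Closure* _cl;
public:
  FilteringClosure(HeapWord boundary, Closure* cl) : _boundary(boundary), _cl(cl) {}
  void do_oop(HeapWord addr) override {
    if (addr != 0 && addr < _boundary) {  // same shape as the removed do_oop_work
      _cl->do_oop(addr);
    }
  }
};

// Example delegate closure: just prints what it is handed.
struct PrintClosure : Closure {
  void do_oop(HeapWord addr) override { std::cout << std::hex << addr << '\n'; }
};

int main() {
  PrintClosure print;
  FilteringClosure filter(/*boundary=*/0x1000, &print);
  for (HeapWord a : std::vector<HeapWord>{0x0800, 0x2000, 0x0f00}) {
    filter.do_oop(a);  // only 0x0800 and 0x0f00 pass the boundary test
  }
}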
@@ -282,8 +282,7 @@ void TenuredGeneration::younger_refs_iterate(OopIterateClosure* blk) {
   // iterations; objects allocated as a result of applying the closure are
   // not included.
 
-  HeapWord* gen_boundary = reserved().start();
-  _rs->younger_refs_in_space_iterate(space(), gen_boundary, blk);
+  _rs->younger_refs_in_space_iterate(space(), blk);
 }
 
 TenuredGeneration::TenuredGeneration(ReservedSpace rs,
@@ -112,12 +112,11 @@ void ClearNoncleanCardWrapper::do_MemRegion(MemRegion mr) {
 }
 
 void CardTableRS::younger_refs_in_space_iterate(TenuredSpace* sp,
-                                                HeapWord* gen_boundary,
                                                 OopIterateClosure* cl) {
   verify_used_region_at_save_marks(sp);
 
   const MemRegion urasm = sp->used_region_at_save_marks();
-  non_clean_card_iterate(sp, gen_boundary, urasm, cl, this);
+  non_clean_card_iterate(sp, urasm, cl, this);
 }
 
 #ifdef ASSERT
@@ -441,7 +440,6 @@ void CardTableRS::initialize() {
 }
 
 void CardTableRS::non_clean_card_iterate(TenuredSpace* sp,
-                                         HeapWord* gen_boundary,
                                          MemRegion mr,
                                          OopIterateClosure* cl,
                                          CardTableRS* ct)
@@ -451,7 +449,7 @@ void CardTableRS::non_clean_card_iterate(TenuredSpace* sp,
   }
   // clear_cl finds contiguous dirty ranges of cards to process and clear.
 
-  DirtyCardToOopClosure* dcto_cl = sp->new_dcto_cl(cl, gen_boundary);
+  DirtyCardToOopClosure* dcto_cl = sp->new_dcto_cl(cl);
   ClearNoncleanCardWrapper clear_cl(dcto_cl, ct);
 
   clear_cl.do_MemRegion(mr);
@@ -48,7 +48,7 @@ class CardTableRS : public CardTable {
 public:
   CardTableRS(MemRegion whole_heap);
 
-  void younger_refs_in_space_iterate(TenuredSpace* sp, HeapWord* gen_boundary, OopIterateClosure* cl);
+  void younger_refs_in_space_iterate(TenuredSpace* sp, OopIterateClosure* cl);
 
   virtual void verify_used_region_at_save_marks(Space* sp) const NOT_DEBUG_RETURN;
 
@@ -72,7 +72,6 @@ public:
   // region mr in the given space and apply cl to any dirty sub-regions
   // of mr. Clears the dirty cards as they are processed.
   void non_clean_card_iterate(TenuredSpace* sp,
-                              HeapWord* gen_boundary,
                               MemRegion mr,
                               OopIterateClosure* cl,
                               CardTableRS* ct);
@@ -100,29 +100,6 @@ class CLDScanClosure: public CLDClosure {
   void do_cld(ClassLoaderData* cld);
 };
 
-#endif // INCLUDE_SERIALGC
-
-class FilteringClosure: public OopIterateClosure {
- private:
-  HeapWord* _boundary;
-  OopIterateClosure* _cl;
- protected:
-  template <class T> inline void do_oop_work(T* p);
- public:
-  FilteringClosure(HeapWord* boundary, OopIterateClosure* cl) :
-    OopIterateClosure(cl->ref_discoverer()), _boundary(boundary),
-    _cl(cl) {}
-  virtual void do_oop(oop* p);
-  virtual void do_oop(narrowOop* p);
-  virtual bool do_metadata()            { assert(!_cl->do_metadata(), "assumption broken, must change to 'return _cl->do_metadata()'"); return false; }
-  virtual void do_klass(Klass*)         { ShouldNotReachHere(); }
-  virtual void do_cld(ClassLoaderData*) { ShouldNotReachHere(); }
-  virtual void do_method(Method*)       { ShouldNotReachHere(); }
-  virtual void do_nmethod(nmethod*)     { ShouldNotReachHere(); }
-};
-
-#if INCLUDE_SERIALGC
-
 // Closure for scanning DefNewGeneration's weak references.
 // -- weak references are processed all at once,
 // with no notion of which generation they were in.
@@ -98,23 +98,6 @@ void DefNewScanClosure::barrier(T* p) {
   }
 }
 
-#endif // INCLUDE_SERIALGC
-
-template <class T> void FilteringClosure::do_oop_work(T* p) {
-  T heap_oop = RawAccess<>::oop_load(p);
-  if (!CompressedOops::is_null(heap_oop)) {
-    oop obj = CompressedOops::decode_not_null(heap_oop);
-    if (cast_from_oop<HeapWord*>(obj) < _boundary) {
-      _cl->do_oop(p);
-    }
-  }
-}
-
-inline void FilteringClosure::do_oop(oop* p)       { FilteringClosure::do_oop_work(p); }
-inline void FilteringClosure::do_oop(narrowOop* p) { FilteringClosure::do_oop_work(p); }
-
-#if INCLUDE_SERIALGC
-
 // Note similarity to FastScanClosure; the difference is that
 // the barrier set is taken care of outside this closure.
 template <class T> inline void ScanWeakRefClosure::do_oop_work(T* p) {
@@ -76,16 +76,7 @@ void DirtyCardToOopClosure::walk_mem_region(MemRegion mr,
   // examining cards here.
   assert(bottom < top, "ought to be at least one obj on a dirty card.");
 
-  if (_boundary != NULL) {
-    // We have a boundary outside of which we don't want to look
-    // at objects, so create a filtering closure around the
-    // oop closure before walking the region.
-    FilteringClosure filter(_boundary, _cl);
-    walk_mem_region_with_cl(mr, bottom, top, &filter);
-  } else {
-    // No boundary, simply walk the heap with the oop closure.
-    walk_mem_region_with_cl(mr, bottom, top, _cl);
-  }
+  walk_mem_region_with_cl(mr, bottom, top, _cl);
 }
 
 // We get called with "mr" representing the dirty region
@@ -140,38 +131,28 @@ void DirtyCardToOopClosure::do_MemRegion(MemRegion mr) {
     _min_done = bottom;
   }
 
-// We must replicate this so that the static type of "FilteringClosure"
-// (see above) is apparent at the oop_iterate calls.
-#define DirtyCardToOopClosure__walk_mem_region_with_cl_DEFN(ClosureType)   \
-void DirtyCardToOopClosure::walk_mem_region_with_cl(MemRegion mr,          \
-                                                    HeapWord* bottom,      \
-                                                    HeapWord* top,         \
-                                                    ClosureType* cl) {     \
-  bottom += cast_to_oop(bottom)->oop_iterate_size(cl, mr);                 \
-  if (bottom < top) {                                                      \
-    HeapWord* next_obj = bottom + cast_to_oop(bottom)->size();             \
-    while (next_obj < top) {                                               \
-      /* Bottom lies entirely below top, so we can call the */             \
-      /* non-memRegion version of oop_iterate below. */                    \
-      cast_to_oop(bottom)->oop_iterate(cl);                                 \
-      bottom = next_obj;                                                    \
-      next_obj = bottom + cast_to_oop(bottom)->size();                      \
-    }                                                                       \
-    /* Last object. */                                                      \
-    cast_to_oop(bottom)->oop_iterate(cl, mr);                               \
-  }                                                                         \
+void DirtyCardToOopClosure::walk_mem_region_with_cl(MemRegion mr,
+                                                    HeapWord* bottom,
+                                                    HeapWord* top,
+                                                    OopIterateClosure* cl) {
+  bottom += cast_to_oop(bottom)->oop_iterate_size(cl, mr);
+  if (bottom < top) {
+    HeapWord* next_obj = bottom + cast_to_oop(bottom)->size();
+    while (next_obj < top) {
+      /* Bottom lies entirely below top, so we can call the */
+      /* non-memRegion version of oop_iterate below. */
+      cast_to_oop(bottom)->oop_iterate(cl);
+      bottom = next_obj;
+      next_obj = bottom + cast_to_oop(bottom)->size();
+    }
+    /* Last object. */
+    cast_to_oop(bottom)->oop_iterate(cl, mr);
+  }
 }
 
-// (There are only two of these, rather than N, because the split is due
-// only to the introduction of the FilteringClosure, a local part of the
-// impl of this abstraction.)
-DirtyCardToOopClosure__walk_mem_region_with_cl_DEFN(OopIterateClosure)
-DirtyCardToOopClosure__walk_mem_region_with_cl_DEFN(FilteringClosure)
 
 DirtyCardToOopClosure*
-ContiguousSpace::new_dcto_cl(OopIterateClosure* cl,
-                             HeapWord* boundary) {
-  return new DirtyCardToOopClosure(this, cl, boundary);
+ContiguousSpace::new_dcto_cl(OopIterateClosure* cl) {
+  return new DirtyCardToOopClosure(this, cl);
 }
 
 void Space::initialize(MemRegion mr,
@@ -243,8 +243,6 @@ class DirtyCardToOopClosure: public MemRegionClosureRO {
 protected:
   OopIterateClosure* _cl;
   Space* _sp;
-  HeapWord* _boundary;          // If non-NULL, process only non-NULL oops
-                                // pointing below boundary.
   HeapWord* _min_done;          // Need a downwards traversal to compensate
                                 // imprecise write barrier; this is the
                                 // lowest location already done (or,
@@ -278,15 +276,9 @@ protected:
   void walk_mem_region_with_cl(MemRegion mr,
                                HeapWord* bottom, HeapWord* top,
                                OopIterateClosure* cl);
-  void walk_mem_region_with_cl(MemRegion mr,
-                               HeapWord* bottom, HeapWord* top,
-                               FilteringClosure* cl);
 
 public:
-  DirtyCardToOopClosure(Space* sp, OopIterateClosure* cl,
-                        HeapWord* boundary) :
-    _cl(cl), _sp(sp), _boundary(boundary),
-    _min_done(NULL) {
+  DirtyCardToOopClosure(Space* sp, OopIterateClosure* cl) :
+    _cl(cl), _sp(sp), _min_done(NULL) {
     NOT_PRODUCT(_last_bottom = NULL);
   }
 
@@ -481,8 +473,7 @@ class ContiguousSpace: public CompactibleSpace {
     set_top(compaction_top());
   }
 
-  DirtyCardToOopClosure* new_dcto_cl(OopIterateClosure* cl,
-                                     HeapWord* boundary);
+  DirtyCardToOopClosure* new_dcto_cl(OopIterateClosure* cl);
 
   // Apply "blk->do_oop" to the addresses of all reference fields in objects
   // starting with the _saved_mark_word, which was noted during a generation's