commit bac53feed5
Merge
@@ -1,5 +1,5 @@
 /*
- * Copyright 1999-2005 Sun Microsystems, Inc. All Rights Reserved.
+ * Copyright 1999-2009 Sun Microsystems, Inc. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
@@ -39,7 +39,6 @@ public class Database {
 private HashMap<String,String> platformDepFiles;
 private long threshold;
 private int nOuterFiles;
-private int nPrecompiledFiles;
 private boolean missingOk;
 private Platform plat;
 /** These allow you to specify files not in the include database
@@ -62,7 +61,6 @@ public class Database {

 threshold = t;
 nOuterFiles = 0;
-nPrecompiledFiles = 0;
 missingOk = false;
 firstFile = null;
 lastFile = null;
@@ -343,7 +341,6 @@ public class Database {
 plat.getGIFileTemplate().getInvDir() +
 list.getName() +
 "\"");
-nPrecompiledFiles += 1;
 }
 }
 inclFile.println();
@@ -408,22 +405,22 @@ public class Database {
 gd.println();
 }

-if (nPrecompiledFiles > 0) {
 // write Precompiled_Files = ...
 gd.println("Precompiled_Files = \\");
 for (Iterator iter = grandInclude.iterator(); iter.hasNext(); ) {
 FileList list = (FileList) iter.next();
+if (list.getCount() >= threshold) {
 gd.println(list.getName() + " \\");
 String platformDep = platformDepFiles.get(list.getName());
 if (platformDep != null) {
 // make sure changes to the platform dependent file will
 // cause regeneration of the pch file.
 gd.println(platformDep + " \\");
 }
 }
-gd.println();
-gd.println();
 }
+gd.println();
+gd.println();

 gd.println("DTraced_Files = \\");
 for (Iterator iter = outerFiles.iterator(); iter.hasNext(); ) {
@@ -483,7 +480,6 @@ public class Database {
 }

 if (plat.includeGIDependencies()
-&& nPrecompiledFiles > 0
 && anII.getUseGrandInclude()) {
 gd.println(" $(Precompiled_Files) \\");
 }

@@ -1233,6 +1233,41 @@ public:
 CardTableModRefBS::card_shift);
 }

+// It takes a region that's not empty (i.e., it has at least one
+// live object in it and sets its corresponding bit on the region
+// bitmap to 1. If the region is "starts humongous" it will also set
+// to 1 the bits on the region bitmap that correspond to its
+// associated "continues humongous" regions.
+void set_bit_for_region(HeapRegion* hr) {
+assert(!hr->continuesHumongous(), "should have filtered those out");
+
+size_t index = hr->hrs_index();
+if (!hr->startsHumongous()) {
+// Normal (non-humongous) case: just set the bit.
+_region_bm->par_at_put((BitMap::idx_t) index, true);
+} else {
+// Starts humongous case: calculate how many regions are part of
+// this humongous region and then set the bit range. It might
+// have been a bit more efficient to look at the object that
+// spans these humongous regions to calculate their number from
+// the object's size. However, it's a good idea to calculate
+// this based on the metadata itself, and not the region
+// contents, so that this code is not aware of what goes into
+// the humongous regions (in case this changes in the future).
+G1CollectedHeap* g1h = G1CollectedHeap::heap();
+size_t end_index = index + 1;
+while (end_index < g1h->n_regions()) {
+HeapRegion* chr = g1h->region_at(end_index);
+if (!chr->continuesHumongous()) {
+break;
+}
+end_index += 1;
+}
+_region_bm->par_at_put_range((BitMap::idx_t) index,
+(BitMap::idx_t) end_index, true);
+}
+}
+
 bool doHeapRegion(HeapRegion* hr) {
 if (_co_tracker != NULL)
 _co_tracker->update();
@@ -1241,13 +1276,13 @@ public:
 _start_vtime_sec = os::elapsedVTime();

 if (hr->continuesHumongous()) {
-HeapRegion* hum_start = hr->humongous_start_region();
-// If the head region of the humongous region has been determined
-// to be alive, then all the tail regions should be marked
-// such as well.
-if (_region_bm->at(hum_start->hrs_index())) {
-_region_bm->par_at_put(hr->hrs_index(), 1);
-}
+// We will ignore these here and process them when their
+// associated "starts humongous" region is processed (see
+// set_bit_for_heap_region()). Note that we cannot rely on their
+// associated "starts humongous" region to have their bit set to
+// 1 since, due to the region chunking in the parallel region
+// iteration, a "continues humongous" region might be visited
+// before its associated "starts humongous".
 return false;
 }

@@ -1343,14 +1378,14 @@ public:
 intptr_t(uintptr_t(tp) >> CardTableModRefBS::card_shift);
 mark_card_num_range(start_card_num, last_card_num);
 // This definitely means the region has live objects.
-_region_bm->par_at_put(hr->hrs_index(), 1);
+set_bit_for_region(hr);
 }
 }

 hr->add_to_marked_bytes(marked_bytes);
 // Update the live region bitmap.
 if (marked_bytes > 0) {
-_region_bm->par_at_put(hr->hrs_index(), 1);
+set_bit_for_region(hr);
 }
 hr->set_top_at_conc_mark_count(nextTop);
 _tot_live += hr->next_live_bytes();