Jesper Wilhelmsson 2017-08-22 16:20:30 +02:00
commit 7b25384c71
17 changed files with 811 additions and 404 deletions

View File

@@ -1106,34 +1106,12 @@ suite = {
"jdk.tools.jaotc.binformat" : {
"subDir" : "../jdk.aot/share/classes",
"sourceDirs" : ["src"],
"dependencies" : [
"jdk.tools.jaotc.jnilibelf",
],
"generatedDependencies" : [
"org.graalvm.compiler.hotspot",
],
"checkstyle" : "jdk.tools.jaotc",
"javaCompliance" : "1.8",
},
"jdk.tools.jaotc.jnilibelf" : {
"subDir" : "../jdk.aot/share/classes",
"sourceDirs" : ["src"],
"dependencies" : [],
"checkstyle" : "jdk.tools.jaotc",
"javaCompliance" : "1.8",
},
"jdk.tools.jaotc.jnilibelf.test" : {
"subDir" : "../../test/compiler/aot",
"sourceDirs" : ["src"],
"dependencies" : [
"jdk.tools.jaotc.jnilibelf",
],
"checkstyle" : "jdk.tools.jaotc",
"javaCompliance" : "1.8",
},
},
"distributions" : {

View File

@@ -38,12 +38,12 @@
#include "gc/g1/g1CollectorPolicy.hpp"
#include "gc/g1/g1CollectorState.hpp"
#include "gc/g1/g1EvacStats.inline.hpp"
#include "gc/g1/g1FullGCScope.hpp"
#include "gc/g1/g1GCPhaseTimes.hpp"
#include "gc/g1/g1HeapSizingPolicy.hpp"
#include "gc/g1/g1HeapTransition.hpp"
#include "gc/g1/g1HeapVerifier.hpp"
#include "gc/g1/g1HotCardCache.hpp"
#include "gc/g1/g1MarkSweep.hpp"
#include "gc/g1/g1OopClosures.inline.hpp"
#include "gc/g1/g1ParScanThreadState.inline.hpp"
#include "gc/g1/g1Policy.hpp"
@@ -51,6 +51,7 @@
#include "gc/g1/g1RemSet.inline.hpp"
#include "gc/g1/g1RootClosures.hpp"
#include "gc/g1/g1RootProcessor.hpp"
#include "gc/g1/g1SerialFullCollector.hpp"
#include "gc/g1/g1StringDedup.hpp"
#include "gc/g1/g1YCTypes.hpp"
#include "gc/g1/heapRegion.inline.hpp"
@@ -1062,73 +1063,6 @@ HeapWord* G1CollectedHeap::attempt_allocation_at_safepoint(size_t word_size,
ShouldNotReachHere();
}
class PostMCRemSetClearClosure: public HeapRegionClosure {
G1CollectedHeap* _g1h;
ModRefBarrierSet* _mr_bs;
public:
PostMCRemSetClearClosure(G1CollectedHeap* g1h, ModRefBarrierSet* mr_bs) :
_g1h(g1h), _mr_bs(mr_bs) {}
bool doHeapRegion(HeapRegion* r) {
HeapRegionRemSet* hrrs = r->rem_set();
_g1h->reset_gc_time_stamps(r);
if (r->is_continues_humongous()) {
// We'll assert that the strong code root list and RSet is empty
assert(hrrs->strong_code_roots_list_length() == 0, "sanity");
assert(hrrs->occupied() == 0, "RSet should be empty");
} else {
hrrs->clear();
}
// You might think here that we could clear just the cards
// corresponding to the used region. But no: if we leave a dirty card
// in a region we might allocate into, then it would prevent that card
// from being enqueued, and cause it to be missed.
// Re: the performance cost: we shouldn't be doing full GC anyway!
_mr_bs->clear(MemRegion(r->bottom(), r->end()));
return false;
}
};
void G1CollectedHeap::clear_rsets_post_compaction() {
PostMCRemSetClearClosure rs_clear(this, g1_barrier_set());
heap_region_iterate(&rs_clear);
}
class RebuildRSOutOfRegionClosure: public HeapRegionClosure {
G1CollectedHeap* _g1h;
RebuildRSOopClosure _cl;
public:
RebuildRSOutOfRegionClosure(G1CollectedHeap* g1, uint worker_i = 0) :
_cl(g1->g1_rem_set(), worker_i),
_g1h(g1)
{ }
bool doHeapRegion(HeapRegion* r) {
if (!r->is_continues_humongous()) {
_cl.set_from(r);
r->oop_iterate(&_cl);
}
return false;
}
};
class ParRebuildRSTask: public AbstractGangTask {
G1CollectedHeap* _g1;
HeapRegionClaimer _hrclaimer;
public:
ParRebuildRSTask(G1CollectedHeap* g1) :
AbstractGangTask("ParRebuildRSTask"), _g1(g1), _hrclaimer(g1->workers()->active_workers()) {}
void work(uint worker_id) {
RebuildRSOutOfRegionClosure rebuild_rs(_g1, worker_id);
_g1->heap_region_par_iterate(&rebuild_rs, worker_id, &_hrclaimer);
}
};
class PostCompactionPrinterClosure: public HeapRegionClosure {
private:
G1HRPrinter* _hr_printer;
@@ -1151,252 +1085,183 @@ void G1CollectedHeap::print_hrm_post_compaction() {
}
void G1CollectedHeap::abort_concurrent_cycle() {
// Note: When we have a more flexible GC logging framework that
// allows us to add optional attributes to a GC log record we
// could consider timing and reporting how long we wait in the
// following two methods.
wait_while_free_regions_coming();
// If we start the compaction before the CM threads finish
// scanning the root regions we might trip them over as we'll
// be moving objects / updating references. So let's wait until
// they are done. By telling them to abort, they should complete
// early.
_cm->root_regions()->abort();
_cm->root_regions()->wait_until_scan_finished();
append_secondary_free_list_if_not_empty_with_lock();
// Disable discovery and empty the discovered lists
// for the CM ref processor.
ref_processor_cm()->disable_discovery();
ref_processor_cm()->abandon_partial_discovery();
ref_processor_cm()->verify_no_references_recorded();
// Abandon current iterations of concurrent marking and concurrent
// refinement, if any are in progress.
concurrent_mark()->abort();
}
void G1CollectedHeap::prepare_heap_for_full_collection() {
// Make sure we'll choose a new allocation region afterwards.
_allocator->release_mutator_alloc_region();
_allocator->abandon_gc_alloc_regions();
g1_rem_set()->cleanupHRRS();
// We may have added regions to the current incremental collection
// set between the last GC or pause and now. We need to clear the
// incremental collection set and then start rebuilding it afresh
// after this full GC.
abandon_collection_set(collection_set());
tear_down_region_sets(false /* free_list_only */);
collector_state()->set_gcs_are_young(true);
}
void G1CollectedHeap::verify_before_full_collection(bool explicit_gc) {
assert(!GCCause::is_user_requested_gc(gc_cause()) || explicit_gc, "invariant");
assert(used() == recalculate_used(), "Should be equal");
_verifier->verify_region_sets_optional();
_verifier->verify_before_gc();
_verifier->check_bitmaps("Full GC Start");
}
void G1CollectedHeap::prepare_heap_for_mutators() {
// Delete metaspaces for unloaded class loaders and clean up loader_data graph
ClassLoaderDataGraph::purge();
MetaspaceAux::verify_metrics();
// Prepare heap for normal collections.
assert(num_free_regions() == 0, "we should not have added any free regions");
rebuild_region_sets(false /* free_list_only */);
abort_refinement();
resize_if_necessary_after_full_collection();
// Rebuild the strong code root lists for each region
rebuild_strong_code_roots();
// Start a new incremental collection set for the next pause
start_new_collection_set();
_allocator->init_mutator_alloc_region();
// Post collection state updates.
MetaspaceGC::compute_new_size();
}
void G1CollectedHeap::abort_refinement() {
if (_hot_card_cache->use_cache()) {
_hot_card_cache->reset_card_counts();
_hot_card_cache->reset_hot_cache();
}
// Discard all remembered set updates.
JavaThread::dirty_card_queue_set().abandon_logs();
assert(dirty_card_queue_set().completed_buffers_num() == 0, "DCQS should be empty");
}
void G1CollectedHeap::verify_after_full_collection() {
check_gc_time_stamps();
_hrm.verify_optional();
_verifier->verify_region_sets_optional();
_verifier->verify_after_gc();
// Clear the previous marking bitmap, if needed for bitmap verification.
// Note we cannot do this when we clear the next marking bitmap in
// G1ConcurrentMark::abort() above since VerifyDuringGC verifies the
// objects marked during a full GC against the previous bitmap.
// But we need to clear it before calling check_bitmaps below since
// the full GC has compacted objects and updated TAMS but not updated
// the prev bitmap.
if (G1VerifyBitmaps) {
GCTraceTime(Debug, gc)("Clear Bitmap for Verification");
_cm->clear_prev_bitmap(workers());
}
_verifier->check_bitmaps("Full GC End");
// At this point there should be no regions in the
// entire heap tagged as young.
assert(check_young_list_empty(), "young list should be empty at this point");
// Note: since we've just done a full GC, concurrent
// marking is no longer active. Therefore we need not
// re-enable reference discovery for the CM ref processor.
// That will be done at the start of the next marking cycle.
// We also know that the STW processor should no longer
// discover any new references.
assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
ref_processor_stw()->verify_no_references_recorded();
ref_processor_cm()->verify_no_references_recorded();
}
void G1CollectedHeap::print_heap_after_full_collection(G1HeapTransition* heap_transition) {
print_hrm_post_compaction();
heap_transition->print();
print_heap_after_gc();
print_heap_regions();
#ifdef TRACESPINNING
ParallelTaskTerminator::print_termination_counts();
#endif
}
void G1CollectedHeap::do_full_collection_inner(G1FullGCScope* scope) {
GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause(), true);
g1_policy()->record_full_collection_start();
print_heap_before_gc();
print_heap_regions();
abort_concurrent_cycle();
verify_before_full_collection(scope->is_explicit_gc());
gc_prologue(true);
prepare_heap_for_full_collection();
G1SerialFullCollector serial(scope, ref_processor_stw());
serial.prepare_collection();
serial.collect();
serial.complete_collection();
prepare_heap_for_mutators();
g1_policy()->record_full_collection_end();
gc_epilogue(true);
// Post collection verification.
verify_after_full_collection();
// Post collection logging.
// We should do this after we potentially resize the heap so
// that all the COMMIT / UNCOMMIT events are generated before
// the compaction events.
print_heap_after_full_collection(scope->heap_transition());
}
bool G1CollectedHeap::do_full_collection(bool explicit_gc,
bool clear_all_soft_refs) {
assert_at_safepoint(true /* should_be_vm_thread */);
if (GCLocker::check_active_before_gc()) {
// Full GC was not completed.
return false;
}
STWGCTimer* gc_timer = G1MarkSweep::gc_timer();
gc_timer->register_gc_start();
SerialOldTracer* gc_tracer = G1MarkSweep::gc_tracer();
GCIdMark gc_id_mark;
gc_tracer->report_gc_start(gc_cause(), gc_timer->gc_start());
SvcGCMarker sgcm(SvcGCMarker::FULL);
ResourceMark rm;
print_heap_before_gc();
print_heap_regions();
trace_heap_before_gc(gc_tracer);
size_t metadata_prev_used = MetaspaceAux::used_bytes();
_verifier->verify_region_sets_optional();
const bool do_clear_all_soft_refs = clear_all_soft_refs ||
collector_policy()->should_clear_all_soft_refs();
ClearedAllSoftRefs casr(do_clear_all_soft_refs, collector_policy());
{
IsGCActiveMark x;
// Timing
assert(!GCCause::is_user_requested_gc(gc_cause()) || explicit_gc, "invariant");
GCTraceCPUTime tcpu;
{
GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause(), true);
TraceCollectorStats tcs(g1mm()->full_collection_counters());
TraceMemoryManagerStats tms(true /* fullGC */, gc_cause());
G1HeapTransition heap_transition(this);
g1_policy()->record_full_collection_start();
// Note: When we have a more flexible GC logging framework that
// allows us to add optional attributes to a GC log record we
// could consider timing and reporting how long we wait in the
// following two methods.
wait_while_free_regions_coming();
// If we start the compaction before the CM threads finish
// scanning the root regions we might trip them over as we'll
// be moving objects / updating references. So let's wait until
// they are done. By telling them to abort, they should complete
// early.
_cm->root_regions()->abort();
_cm->root_regions()->wait_until_scan_finished();
append_secondary_free_list_if_not_empty_with_lock();
gc_prologue(true);
increment_total_collections(true /* full gc */);
increment_old_marking_cycles_started();
assert(used() == recalculate_used(), "Should be equal");
_verifier->verify_before_gc();
_verifier->check_bitmaps("Full GC Start");
pre_full_gc_dump(gc_timer);
#if defined(COMPILER2) || INCLUDE_JVMCI
DerivedPointerTable::clear();
#endif
// Disable discovery and empty the discovered lists
// for the CM ref processor.
ref_processor_cm()->disable_discovery();
ref_processor_cm()->abandon_partial_discovery();
ref_processor_cm()->verify_no_references_recorded();
// Abandon current iterations of concurrent marking and concurrent
// refinement, if any are in progress.
concurrent_mark()->abort();
// Make sure we'll choose a new allocation region afterwards.
_allocator->release_mutator_alloc_region();
_allocator->abandon_gc_alloc_regions();
g1_rem_set()->cleanupHRRS();
// We may have added regions to the current incremental collection
// set between the last GC or pause and now. We need to clear the
// incremental collection set and then start rebuilding it afresh
// after this full GC.
abandon_collection_set(collection_set());
tear_down_region_sets(false /* free_list_only */);
collector_state()->set_gcs_are_young(true);
// See the comments in g1CollectedHeap.hpp and
// G1CollectedHeap::ref_processing_init() about
// how reference processing currently works in G1.
// Temporarily make discovery by the STW ref processor single threaded (non-MT).
ReferenceProcessorMTDiscoveryMutator stw_rp_disc_ser(ref_processor_stw(), false);
// Temporarily clear the STW ref processor's _is_alive_non_header field.
ReferenceProcessorIsAliveMutator stw_rp_is_alive_null(ref_processor_stw(), NULL);
ref_processor_stw()->enable_discovery();
ref_processor_stw()->setup_policy(do_clear_all_soft_refs);
// Do collection work
{
HandleMark hm; // Discard invalid handles created during gc
G1MarkSweep::invoke_at_safepoint(ref_processor_stw(), do_clear_all_soft_refs);
}
assert(num_free_regions() == 0, "we should not have added any free regions");
rebuild_region_sets(false /* free_list_only */);
ReferenceProcessorPhaseTimes pt(NULL, ref_processor_stw()->num_q());
// Enqueue any discovered reference objects that have
// not been removed from the discovered lists.
ref_processor_stw()->enqueue_discovered_references(NULL, &pt);
pt.print_enqueue_phase();
#if defined(COMPILER2) || INCLUDE_JVMCI
DerivedPointerTable::update_pointers();
#endif
MemoryService::track_memory_usage();
assert(!ref_processor_stw()->discovery_enabled(), "Postcondition");
ref_processor_stw()->verify_no_references_recorded();
// Delete metaspaces for unloaded class loaders and clean up loader_data graph
ClassLoaderDataGraph::purge();
MetaspaceAux::verify_metrics();
// Note: since we've just done a full GC, concurrent
// marking is no longer active. Therefore we need not
// re-enable reference discovery for the CM ref processor.
// That will be done at the start of the next marking cycle.
assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
ref_processor_cm()->verify_no_references_recorded();
reset_gc_time_stamp();
// Since everything potentially moved, we will clear all remembered
// sets, and clear all cards. Later we will rebuild remembered
// sets. We will also reset the GC time stamps of the regions.
clear_rsets_post_compaction();
check_gc_time_stamps();
resize_if_necessary_after_full_collection();
// We should do this after we potentially resize the heap so
// that all the COMMIT / UNCOMMIT events are generated before
// the compaction events.
print_hrm_post_compaction();
if (_hot_card_cache->use_cache()) {
_hot_card_cache->reset_card_counts();
_hot_card_cache->reset_hot_cache();
}
// Rebuild remembered sets of all regions.
uint n_workers =
AdaptiveSizePolicy::calc_active_workers(workers()->total_workers(),
workers()->active_workers(),
Threads::number_of_non_daemon_threads());
workers()->update_active_workers(n_workers);
log_info(gc,task)("Using %u workers of %u to rebuild remembered set", n_workers, workers()->total_workers());
ParRebuildRSTask rebuild_rs_task(this);
workers()->run_task(&rebuild_rs_task);
// Rebuild the strong code root lists for each region
rebuild_strong_code_roots();
if (true) { // FIXME
MetaspaceGC::compute_new_size();
}
#ifdef TRACESPINNING
ParallelTaskTerminator::print_termination_counts();
#endif
// Discard all rset updates
JavaThread::dirty_card_queue_set().abandon_logs();
assert(dirty_card_queue_set().completed_buffers_num() == 0, "DCQS should be empty");
// At this point there should be no regions in the
// entire heap tagged as young.
assert(check_young_list_empty(), "young list should be empty at this point");
// Update the number of full collections that have been completed.
increment_old_marking_cycles_completed(false /* concurrent */);
_hrm.verify_optional();
_verifier->verify_region_sets_optional();
_verifier->verify_after_gc();
// Clear the previous marking bitmap, if needed for bitmap verification.
// Note we cannot do this when we clear the next marking bitmap in
// G1ConcurrentMark::abort() above since VerifyDuringGC verifies the
// objects marked during a full GC against the previous bitmap.
// But we need to clear it before calling check_bitmaps below since
// the full GC has compacted objects and updated TAMS but not updated
// the prev bitmap.
if (G1VerifyBitmaps) {
GCTraceTime(Debug, gc)("Clear Bitmap for Verification");
_cm->clear_prev_bitmap(workers());
}
_verifier->check_bitmaps("Full GC End");
start_new_collection_set();
_allocator->init_mutator_alloc_region();
g1_policy()->record_full_collection_end();
// We must call G1MonitoringSupport::update_sizes() in the same scoping level
// as an active TraceMemoryManagerStats object (i.e. before the destructor for the
// TraceMemoryManagerStats is called) so that the G1 memory pools are updated
// before any GC notifications are raised.
g1mm()->update_sizes();
gc_epilogue(true);
heap_transition.print();
print_heap_after_gc();
print_heap_regions();
trace_heap_after_gc(gc_tracer);
post_full_gc_dump(gc_timer);
}
gc_timer->register_gc_end();
gc_tracer->report_gc_end(gc_timer->gc_end(), gc_timer->time_partitions());
}
G1FullGCScope scope(explicit_gc, do_clear_all_soft_refs);
do_full_collection_inner(&scope);
// Full collection was successfully completed.
return true;
}
@@ -2677,21 +2542,37 @@ G1CollectedHeap* G1CollectedHeap::heap() {
return (G1CollectedHeap*)heap;
}
void G1CollectedHeap::gc_prologue(bool full /* Ignored */) {
void G1CollectedHeap::gc_prologue(bool full) {
// always_do_update_barrier = false;
assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
double start = os::elapsedTime();
// This summary needs to be printed before incrementing total collections.
g1_rem_set()->print_periodic_summary_info("Before GC RS summary", total_collections());
// Update common counters.
increment_total_collections(full /* full gc */);
if (full) {
increment_old_marking_cycles_started();
reset_gc_time_stamp();
} else {
increment_gc_time_stamp();
}
// Fill TLAB's and such
double start = os::elapsedTime();
accumulate_statistics_all_tlabs();
ensure_parsability(true);
g1_policy()->phase_times()->record_prepare_tlab_time_ms((os::elapsedTime() - start) * 1000.0);
g1_rem_set()->print_periodic_summary_info("Before GC RS summary", total_collections());
}
void G1CollectedHeap::gc_epilogue(bool full) {
// we are at the end of the GC. Total collections has already been increased.
// Update common counters.
if (full) {
// Update the number of full collections that have been completed.
increment_old_marking_cycles_completed(false /* concurrent */);
}
// We are at the end of the GC. Total collections has already been increased.
g1_rem_set()->print_periodic_summary_info("After GC RS summary", total_collections() - 1);
// FIXME: what is this about?
@@ -2708,6 +2589,7 @@ void G1CollectedHeap::gc_epilogue(bool full) {
allocation_context_stats().update(full);
MemoryService::track_memory_usage();
// We have just completed a GC. Update the soft reference
// policy with the new heap occupancy
Universe::update_heap_info_at_gc();
@@ -3098,8 +2980,6 @@ G1CollectedHeap::do_collection_pause_at_safepoint(double target_pause_time_ms) {
IsGCActiveMark x;
gc_prologue(false);
increment_total_collections(false /* full gc */);
increment_gc_time_stamp();
if (VerifyRememberedSets) {
log_info(gc, verify)("[Verifying RemSets before GC]");
@@ -3261,8 +3141,6 @@ G1CollectedHeap::do_collection_pause_at_safepoint(double target_pause_time_ms) {
evacuation_info.set_collectionset_used_before(collection_set()->bytes_used_before());
evacuation_info.set_bytes_copied(g1_policy()->bytes_copied_during_gc());
MemoryService::track_memory_usage();
if (VerifyRememberedSets) {
log_info(gc, verify)("[Verifying RemSets after GC]");
VerifyRegionRemSetClosure v_cl;

View File

@@ -34,6 +34,7 @@
#include "gc/g1/g1EdenRegions.hpp"
#include "gc/g1/g1EvacFailure.hpp"
#include "gc/g1/g1EvacStats.hpp"
#include "gc/g1/g1HeapTransition.hpp"
#include "gc/g1/g1HeapVerifier.hpp"
#include "gc/g1/g1HRPrinter.hpp"
#include "gc/g1/g1InCSetState.hpp"
@@ -86,6 +87,7 @@ class Ticks;
class WorkGang;
class G1Allocator;
class G1ArchiveAllocator;
class G1FullGCScope;
class G1HeapVerifier;
class G1HeapSizingPolicy;
class G1HeapSummary;
@@ -513,6 +515,17 @@ protected:
AllocationContext_t context,
bool* succeeded);
private:
// Internal helpers used during full GC to split it up to
// increase readability.
void do_full_collection_inner(G1FullGCScope* scope);
void abort_concurrent_cycle();
void verify_before_full_collection(bool explicit_gc);
void prepare_heap_for_full_collection();
void prepare_heap_for_mutators();
void abort_refinement();
void verify_after_full_collection();
void print_heap_after_full_collection(G1HeapTransition* heap_transition);
// Helper method for satisfy_failed_allocation()
HeapWord* satisfy_failed_allocation_helper(size_t word_size,
AllocationContext_t context,

View File

@@ -0,0 +1,88 @@
/*
* Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "gc/g1/g1FullGCScope.hpp"
G1FullGCScope* G1FullGCScope::_instance = NULL;
G1FullGCScope* G1FullGCScope::instance() {
assert(_instance != NULL, "Must be setup already");
return _instance;
}
G1FullGCScope::G1FullGCScope(bool explicit_gc, bool clear_soft) :
_rm(),
_explicit_gc(explicit_gc),
_g1h(G1CollectedHeap::heap()),
_gc_id(),
_svc_marker(SvcGCMarker::FULL),
_timer(),
_tracer(),
_active(),
_cpu_time(),
_soft_refs(clear_soft, _g1h->collector_policy()),
_memory_stats(true, _g1h->gc_cause()),
_collector_stats(_g1h->g1mm()->full_collection_counters()),
_heap_transition(_g1h) {
assert(_instance == NULL, "Only one scope at a time");
_timer.register_gc_start();
_tracer.report_gc_start(_g1h->gc_cause(), _timer.gc_start());
_g1h->pre_full_gc_dump(&_timer);
_g1h->trace_heap_before_gc(&_tracer);
_instance = this;
}
G1FullGCScope::~G1FullGCScope() {
// We must call G1MonitoringSupport::update_sizes() in the same scoping level
// as an active TraceMemoryManagerStats object (i.e. before the destructor for the
// TraceMemoryManagerStats is called) so that the G1 memory pools are updated
// before any GC notifications are raised.
_g1h->g1mm()->update_sizes();
_g1h->trace_heap_after_gc(&_tracer);
_g1h->post_full_gc_dump(&_timer);
_timer.register_gc_end();
_tracer.report_gc_end(_timer.gc_end(), _timer.time_partitions());
_instance = NULL;
}
bool G1FullGCScope::is_explicit_gc() {
return _explicit_gc;
}
bool G1FullGCScope::should_clear_soft_refs() {
return _soft_refs.should_clear();
}
STWGCTimer* G1FullGCScope::timer() {
return &_timer;
}
SerialOldTracer* G1FullGCScope::tracer() {
return &_tracer;
}
G1HeapTransition* G1FullGCScope::heap_transition() {
return &_heap_transition;
}

View File

@@ -0,0 +1,72 @@
/*
* Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_VM_GC_G1_G1FULLGCSCOPE_HPP
#define SHARE_VM_GC_G1_G1FULLGCSCOPE_HPP
#include "gc/g1/g1CollectedHeap.hpp"
#include "gc/g1/g1HeapTransition.hpp"
#include "gc/shared/collectorCounters.hpp"
#include "gc/shared/gcId.hpp"
#include "gc/shared/gcTrace.hpp"
#include "gc/shared/gcTraceTime.hpp"
#include "gc/shared/gcTimer.hpp"
#include "gc/shared/isGCActiveMark.hpp"
#include "gc/shared/vmGCOperations.hpp"
#include "memory/allocation.hpp"
#include "services/memoryService.hpp"
// Class used to group scoped objects used in the Full GC together.
class G1FullGCScope : public StackObj {
ResourceMark _rm;
bool _explicit_gc;
G1CollectedHeap* _g1h;
GCIdMark _gc_id;
SvcGCMarker _svc_marker;
STWGCTimer _timer;
SerialOldTracer _tracer;
IsGCActiveMark _active;
GCTraceCPUTime _cpu_time;
ClearedAllSoftRefs _soft_refs;
TraceCollectorStats _collector_stats;
TraceMemoryManagerStats _memory_stats;
G1HeapTransition _heap_transition;
// Singleton instance.
static G1FullGCScope* _instance;
public:
static G1FullGCScope* instance();
G1FullGCScope(bool explicit_gc, bool clear_soft);
~G1FullGCScope();
bool is_explicit_gc();
bool should_clear_soft_refs();
STWGCTimer* timer();
SerialOldTracer* tracer();
G1HeapTransition* heap_transition();
};
#endif //SHARE_VM_GC_G1_G1FULLGCSCOPE_HPP
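G1FullGCScope is a stack-allocated RAII scope: the constructor starts the STW timer and tracer, records the pre-GC heap state, and publishes the object through the _instance singleton so code running inside the pause (for example G1MarkSweep::gc_timer() and gc_tracer() in this commit) can reach the shared timer and tracer; the destructor updates the monitoring sizes, traces the heap after GC, and reports GC end. Below is a minimal, hypothetical sketch of that lifecycle, not code from the patch; the driver function name is invented for illustration.

// Hypothetical driver, for illustration only; assumes we are inside a full-GC VM operation.
void example_full_gc_pause_body() {
  // Construction: register GC start, trace the heap before GC, publish the singleton.
  G1FullGCScope scope(true /* explicit_gc */, false /* clear_soft */);

  // While the scope is live, collaborators look it up instead of owning their own state:
  STWGCTimer* timer = G1FullGCScope::instance()->timer();        // shared STW timer
  SerialOldTracer* tracer = G1FullGCScope::instance()->tracer(); // shared GC tracer

  // ... perform the actual collection here, reporting through timer and tracer ...

} // Destruction: update monitoring sizes, trace the heap after GC, report GC end.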

View File

@@ -29,6 +29,7 @@
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/icBuffer.hpp"
#include "gc/g1/g1FullGCScope.hpp"
#include "gc/g1/g1MarkSweep.hpp"
#include "gc/g1/g1RootProcessor.hpp"
#include "gc/g1/g1StringDedup.hpp"
@@ -59,7 +60,11 @@ class HeapRegion;
void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
bool clear_all_softrefs) {
assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
HandleMark hm; // Discard invalid handles created during gc
#if defined(COMPILER2) || INCLUDE_JVMCI
DerivedPointerTable::clear();
#endif
#ifdef ASSERT
if (G1CollectedHeap::heap()->collector_policy()->should_clear_all_soft_refs()) {
assert(clear_all_softrefs, "Policy should have been checked earler");
@@ -85,8 +90,10 @@ void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
// The marking doesn't preserve the marks of biased objects.
BiasedLocking::preserve_marks();
// Process roots and do the marking.
mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);
// Prepare compaction.
mark_sweep_phase2();
#if defined(COMPILER2) || INCLUDE_JVMCI
@@ -94,14 +101,21 @@ void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
DerivedPointerTable::set_active(false);
#endif
// Adjust all pointers.
mark_sweep_phase3();
// Do the actual compaction.
mark_sweep_phase4();
GenMarkSweep::restore_marks();
BiasedLocking::restore_marks();
GenMarkSweep::deallocate_stacks();
#if defined(COMPILER2) || INCLUDE_JVMCI
// Now update the derived pointers.
DerivedPointerTable::update_pointers();
#endif
CodeCache::gc_epilogue();
JvmtiExport::gc_epilogue();
@@ -109,6 +123,13 @@ void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
GenMarkSweep::set_ref_processor(NULL);
}
STWGCTimer* G1MarkSweep::gc_timer() {
return G1FullGCScope::instance()->timer();
}
SerialOldTracer* G1MarkSweep::gc_tracer() {
return G1FullGCScope::instance()->tracer();
}
void G1MarkSweep::allocate_stacks() {
GenMarkSweep::_preserved_count_max = 0;

View File

@@ -52,8 +52,8 @@ class G1MarkSweep : AllStatic {
static void invoke_at_safepoint(ReferenceProcessor* rp,
bool clear_all_softrefs);
static STWGCTimer* gc_timer() { return GenMarkSweep::_gc_timer; }
static SerialOldTracer* gc_tracer() { return GenMarkSweep::_gc_tracer; }
static STWGCTimer* gc_timer();
static SerialOldTracer* gc_tracer();
private:
// Mark live objects

View File

@@ -0,0 +1,143 @@
/*
* Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "gc/g1/g1CollectedHeap.inline.hpp"
#include "gc/g1/g1FullGCScope.hpp"
#include "gc/g1/g1MarkSweep.hpp"
#include "gc/g1/g1RemSet.inline.hpp"
#include "gc/g1/g1SerialFullCollector.hpp"
#include "gc/g1/heapRegionRemSet.hpp"
#include "gc/shared/referenceProcessor.hpp"
G1SerialFullCollector::G1SerialFullCollector(G1FullGCScope* scope,
ReferenceProcessor* reference_processor) :
_scope(scope),
_reference_processor(reference_processor),
_is_alive_mutator(_reference_processor, NULL),
_mt_discovery_mutator(_reference_processor, false) {
// Temporarily make discovery by the STW ref processor single threaded (non-MT)
// and clear the STW ref processor's _is_alive_non_header field.
}
void G1SerialFullCollector::prepare_collection() {
_reference_processor->enable_discovery();
_reference_processor->setup_policy(_scope->should_clear_soft_refs());
}
void G1SerialFullCollector::complete_collection() {
// Enqueue any discovered reference objects that have
// not been removed from the discovered lists.
ReferenceProcessorPhaseTimes pt(NULL, _reference_processor->num_q());
_reference_processor->enqueue_discovered_references(NULL, &pt);
pt.print_enqueue_phase();
// Iterate the heap and rebuild the remembered sets.
rebuild_remembered_sets();
}
void G1SerialFullCollector::collect() {
// Do the actual collection work.
G1MarkSweep::invoke_at_safepoint(_reference_processor, _scope->should_clear_soft_refs());
}
class PostMCRemSetClearClosure: public HeapRegionClosure {
G1CollectedHeap* _g1h;
ModRefBarrierSet* _mr_bs;
public:
PostMCRemSetClearClosure(G1CollectedHeap* g1h, ModRefBarrierSet* mr_bs) :
_g1h(g1h), _mr_bs(mr_bs) {}
bool doHeapRegion(HeapRegion* r) {
HeapRegionRemSet* hrrs = r->rem_set();
_g1h->reset_gc_time_stamps(r);
if (r->is_continues_humongous()) {
// We'll assert that the strong code root list and RSet is empty
assert(hrrs->strong_code_roots_list_length() == 0, "sanity");
assert(hrrs->occupied() == 0, "RSet should be empty");
} else {
hrrs->clear();
}
// You might think here that we could clear just the cards
// corresponding to the used region. But no: if we leave a dirty card
// in a region we might allocate into, then it would prevent that card
// from being enqueued, and cause it to be missed.
// Re: the performance cost: we shouldn't be doing full GC anyway!
_mr_bs->clear(MemRegion(r->bottom(), r->end()));
return false;
}
};
class RebuildRSOutOfRegionClosure: public HeapRegionClosure {
G1CollectedHeap* _g1h;
RebuildRSOopClosure _cl;
public:
RebuildRSOutOfRegionClosure(G1CollectedHeap* g1, uint worker_i = 0) :
_cl(g1->g1_rem_set(), worker_i),
_g1h(g1)
{ }
bool doHeapRegion(HeapRegion* r) {
if (!r->is_continues_humongous()) {
_cl.set_from(r);
r->oop_iterate(&_cl);
}
return false;
}
};
class ParRebuildRSTask: public AbstractGangTask {
G1CollectedHeap* _g1;
HeapRegionClaimer _hrclaimer;
public:
ParRebuildRSTask(G1CollectedHeap* g1) :
AbstractGangTask("ParRebuildRSTask"), _g1(g1), _hrclaimer(g1->workers()->active_workers()) {}
void work(uint worker_id) {
RebuildRSOutOfRegionClosure rebuild_rs(_g1, worker_id);
_g1->heap_region_par_iterate(&rebuild_rs, worker_id, &_hrclaimer);
}
};
void G1SerialFullCollector::rebuild_remembered_sets() {
G1CollectedHeap* g1h = G1CollectedHeap::heap();
// First clear the stale remembered sets.
PostMCRemSetClearClosure rs_clear(g1h, g1h->g1_barrier_set());
g1h->heap_region_iterate(&rs_clear);
// Rebuild remembered sets of all regions.
uint n_workers = AdaptiveSizePolicy::calc_active_workers(g1h->workers()->total_workers(),
g1h->workers()->active_workers(),
Threads::number_of_non_daemon_threads());
g1h->workers()->update_active_workers(n_workers);
log_info(gc,task)("Using %u workers of %u to rebuild remembered set", n_workers, g1h->workers()->total_workers());
ParRebuildRSTask rebuild_rs_task(g1h);
g1h->workers()->run_task(&rebuild_rs_task);
}

View File

@@ -0,0 +1,49 @@
/*
* Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_VM_GC_G1_G1SERIALCOLLECTOR_HPP
#define SHARE_VM_GC_G1_G1SERIALCOLLECTOR_HPP
#include "memory/allocation.hpp"
class G1FullGCScope;
class ReferenceProcessor;
class G1SerialFullCollector : StackObj {
G1FullGCScope* _scope;
ReferenceProcessor* _reference_processor;
ReferenceProcessorIsAliveMutator _is_alive_mutator;
ReferenceProcessorMTDiscoveryMutator _mt_discovery_mutator;
void rebuild_remembered_sets();
public:
G1SerialFullCollector(G1FullGCScope* scope, ReferenceProcessor* reference_processor);
void prepare_collection();
void collect();
void complete_collection();
};
#endif // SHARE_VM_GC_G1_G1SERIALCOLLECTOR_HPP
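G1SerialFullCollector wraps the serial mark-sweep behind a prepare/collect/complete protocol: the constructor temporarily forces single-threaded (non-MT) STW reference discovery and clears the _is_alive_non_header field via the two mutators, prepare_collection() enables discovery and applies the soft-reference policy taken from the scope, collect() calls G1MarkSweep::invoke_at_safepoint(), and complete_collection() enqueues the remaining discovered references and rebuilds the remembered sets. A hedged usage sketch mirroring do_full_collection_inner() above (the wrapper function name is invented for illustration):

// Illustrative only; assumes a safepoint with a live G1FullGCScope.
void run_serial_full_gc(G1FullGCScope* scope, ReferenceProcessor* stw_rp) {
  G1SerialFullCollector serial(scope, stw_rp); // installs the discovery/is-alive mutators
  serial.prepare_collection();                 // enable discovery, set soft-ref policy
  serial.collect();                            // serial mark-sweep at the safepoint
  serial.complete_collection();                // enqueue references, rebuild remembered sets
}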

View File

@@ -151,6 +151,8 @@ class ClearedAllSoftRefs : public StackObj {
_collector_policy->cleared_all_soft_refs();
}
}
bool should_clear() { return _clear_all_soft_refs; }
};
class GenCollectorPolicy : public CollectorPolicy {

View File

@@ -54,68 +54,6 @@ compiler/types/correctness/OffTest.java 8066173 generic-all
# aot test intermittently failing in jprt 8175791
compiler/aot/DeoptimizationTest.java 8175791 windows-all
# aot missing tools (linker) on OS-X and Windows 8183337
compiler/aot/calls/fromAot/AotInvokeDynamic2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeDynamic2CompiledTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeDynamic2InterpretedTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeDynamic2NativeTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeInterface2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeInterface2CompiledTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeInterface2InterpretedTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeInterface2NativeTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeSpecial2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeSpecial2CompiledTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeSpecial2InterpretedTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeSpecial2NativeTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeStatic2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeStatic2CompiledTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeStatic2InterpretedTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeStatic2NativeTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeVirtual2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeVirtual2CompiledTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeVirtual2InterpretedTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromAot/AotInvokeVirtual2NativeTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromCompiled/CompiledInvokeDynamic2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromCompiled/CompiledInvokeInterface2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromCompiled/CompiledInvokeSpecial2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromCompiled/CompiledInvokeStatic2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromCompiled/CompiledInvokeVirtual2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromInterpreted/InterpretedInvokeDynamic2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromInterpreted/InterpretedInvokeInterface2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromInterpreted/InterpretedInvokeSpecial2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromInterpreted/InterpretedInvokeStatic2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromInterpreted/InterpretedInvokeVirtual2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromNative/NativeInvokeSpecial2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromNative/NativeInvokeStatic2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/calls/fromNative/NativeInvokeVirtual2AotTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/DisabledAOTWithLibraryTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/IncorrectAOTLibraryTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/ClasspathOptionUnknownClassTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/CompileClassTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/CompileDirectoryTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/CompileJarTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/CompileModuleTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/ListOptionNotExistingTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/ListOptionTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/jaotc/ListOptionWrongFileTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/MultipleAOTLibraryTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/NonExistingAOTLibraryTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/SingleAOTLibraryTest.java 8183337 windows-all,macosx-all
compiler/aot/cli/SingleAOTOptionTest.java 8183337 windows-all,macosx-all
compiler/aot/DeoptimizationTest.java 8183337 windows-all,macosx-all
compiler/aot/jdk.tools.jaotc.test/src/jdk/tools/jaotc/test/collect/ClassSearchTest.java 8183337 windows-all,macosx-all
compiler/aot/jdk.tools.jaotc.test/src/jdk/tools/jaotc/test/collect/ClassSourceTest.java 8183337 windows-all,macosx-all
compiler/aot/jdk.tools.jaotc.test/src/jdk/tools/jaotc/test/collect/directory/DirectorySourceProviderTest.java 8183337 windows-all,macosx-all
compiler/aot/jdk.tools.jaotc.test/src/jdk/tools/jaotc/test/collect/jar/JarSourceProviderTest.java 8183337 windows-all,macosx-all
compiler/aot/jdk.tools.jaotc.test/src/jdk/tools/jaotc/test/collect/module/ModuleSourceProviderTest.java 8183337 windows-all,macosx-all
compiler/aot/jdk.tools.jaotc.test/src/jdk/tools/jaotc/test/collect/SearchPathTest.java 8183337 windows-all,macosx-all
compiler/aot/jdk.tools.jaotc.test/src/jdk/tools/jaotc/test/NativeOrderOutputStreamTest.java 8183337 windows-all,macosx-all
compiler/aot/RecompilationTest.java 8183337 windows-all,macosx-all
compiler/aot/SharedUsageTest.java 8183337 windows-all,macosx-all
compiler/aot/verification/ClassAndLibraryNotMatchTest.java 8183337 windows-all,macosx-all
compiler/aot/verification/vmflags/NotTrackedFlagTest.java 8183337 windows-all,macosx-all
compiler/aot/verification/vmflags/TrackedFlagTest.java 8183337 windows-all,macosx-all
#############################################################################
# :hotspot_gc

View File

@@ -23,9 +23,14 @@
package compiler.aot;
import jdk.test.lib.Platform;
import jdk.test.lib.artifacts.Artifact;
import jdk.test.lib.artifacts.ArtifactResolver;
import jdk.test.lib.process.OutputAnalyzer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -35,6 +40,7 @@ import java.util.Arrays;
import java.util.List;
import jdk.test.lib.JDKToolLauncher;
import jdk.test.lib.Utils;
import jdk.test.lib.process.ProcessTools;
/**
* A simple class calling AOT compiler over requested items
@@ -102,6 +108,11 @@ public class AotCompiler {
}
args.add("--class-name");
args.add(item);
String linker = resolveLinker();
if (linker != null) {
args.add("--linker-path");
args.add(linker);
}
return launchJaotc(args, extraopts);
}
@@ -119,8 +130,8 @@ public class AotCompiler {
launcher.addToolArg(arg);
}
try {
return new OutputAnalyzer(new ProcessBuilder(launcher.getCommand()).inheritIO().start());
} catch (IOException e) {
return ProcessTools.executeCommand(new ProcessBuilder(launcher.getCommand()).redirectErrorStream(true));
} catch (Throwable e) {
throw new Error("Can't start test process: " + e, e);
}
}
@@ -130,4 +141,142 @@ public class AotCompiler {
+ " -class <class> -libname <.so name>"
+ " [-compile <compileItems>]* [-extraopt <java option>]*");
}
public static String resolveLinker() {
Path linker = null;
// 1st, check if PATH has ld
for (String path : System.getenv("PATH").split(File.pathSeparator)) {
if (Files.exists(Paths.get(path).resolve("ld"))) {
// there is ld in PATH, jaotc is supposed to find it by its own
return null;
}
}
// there is no ld in PATH, will use ld from devkit
// artifacts are got from common/conf/jib-profiles.js
try {
if (Platform.isWindows()) {
if (Platform.isX64()) {
@Artifact(organization = "jpg.infra.builddeps",
name = "devkit-windows_x64",
revision = "VS2013SP4+1.0",
extension = "tar.gz")
class DevkitWindowsX64 { }
String artifactName = "jpg.infra.builddeps."
+ "devkit-windows_x64-"
+ "VS2013SP4+1.0";
Path devkit = ArtifactResolver.resolve(DevkitWindowsX64.class)
.get(artifactName);
linker = devkit.resolve("VC")
.resolve("bin")
.resolve("amd64")
.resolve("link.exe");
}
} else if (Platform.isOSX()) {
@Artifact(organization = "jpg.infra.builddeps",
name = "devkit-macosx_x64",
revision = "Xcode6.3-MacOSX10.9+1.0",
extension = "tar.gz")
class DevkitMacosx { }
String artifactName = "jpg.infra.builddeps."
+ "devkit-macosx_x64-"
+ "Xcode6.3-MacOSX10.9+1.0";
Path devkit = ArtifactResolver.resolve(DevkitMacosx.class)
.get(artifactName);
linker = devkit.resolve("Xcode.app")
.resolve("Contents")
.resolve("Developer")
.resolve("Toolchains")
.resolve("XcodeDefault.xctoolchain")
.resolve("usr")
.resolve("bin")
.resolve("ld");
} else if (Platform.isSolaris()) {
if (Platform.isSparc()) {
@Artifact(organization = "jpg.infra.builddeps",
name = "devkit-solaris_sparcv9",
revision = "SS12u4-Solaris11u1+1.0",
extension = "tar.gz")
class DevkitSolarisSparc { }
String artifactName = "jpg.infra.builddeps."
+ "devkit-solaris_sparcv9-"
+ "SS12u4-Solaris11u1+1.0";
Path devkit = ArtifactResolver.resolve(DevkitSolarisSparc.class)
.get(artifactName);
linker = devkit.resolve("SS12u4-Solaris11u1")
.resolve("gnu")
.resolve("bin")
.resolve("ld");
} else if (Platform.isX64()) {
@Artifact(organization = "jpg.infra.builddeps",
name = "devkit-solaris_x64",
revision = "SS12u4-Solaris11u1+1.0",
extension = "tar.gz")
class DevkitSolarisX64 { }
String artifactName = "jpg.infra.builddeps."
+ "devkit-solaris_x64-"
+ "SS12u4-Solaris11u1+1.0";
Path devkit = ArtifactResolver.resolve(DevkitSolarisX64.class)
.get(artifactName);
linker = devkit.resolve("SS12u4-Solaris11u1")
.resolve("bin")
.resolve("amd64")
.resolve("ld");
}
} else if (Platform.isLinux()) {
if (Platform.isAArch64()) {
@Artifact(organization = "jpg.infra.builddeps",
name = "devkit-linux_aarch64",
revision = "gcc-linaro-aarch64-linux-gnu-4.8-2013.11_linux+1.0",
extension = "tar.gz")
class DevkitLinuxAArch64 { }
String artifactName = "jpg.infra.builddeps."
+ "devkit-linux_aarch64-"
+ "gcc-linaro-aarch64-linux-gnu-4.8-2013.11_linux+1.0";
Path devkit = ArtifactResolver.resolve(DevkitLinuxAArch64.class)
.get(artifactName);
linker = devkit.resolve("aarch64-linux-gnu")
.resolve("bin")
.resolve("ld");
} else if (Platform.isARM()) {
@Artifact(organization = "jpg.infra.builddeps",
name = "devkit-linux_arm",
revision = "gcc-linaro-arm-linux-gnueabihf-raspbian-2012.09-20120921_linux+1.0",
extension = "tar.gz")
class DevkitLinuxARM { }
String artifactName = "jpg.infra.builddeps."
+ "devkit-linux_arm-"
+ "gcc-linaro-arm-linux-gnueabihf-raspbian-2012.09-20120921_linux+1.0";
Path devkit = ArtifactResolver.resolve(DevkitLinuxARM.class)
.get(artifactName);
linker = devkit.resolve("arm-linux-gnueabihf")
.resolve("bin")
.resolve("ld");
} else if (Platform.isX64()) {
@Artifact(organization = "jpg.infra.builddeps",
name = "devkit-linux_x64",
revision = "gcc4.9.2-OEL6.4+1.1",
extension = "tar.gz")
class DevkitLinuxX64 { }
String artifactName = "jpg.infra.builddeps."
+ "devkit-linux_x64-"
+ "gcc4.9.2-OEL6.4+1.1";
Path devkit = ArtifactResolver.resolve(DevkitLinuxX64.class)
.get(artifactName);
linker = devkit.resolve("bin")
.resolve("ld");
}
}
} catch (FileNotFoundException e) {
throw new Error("artifact resolution error: " + e, e);
}
if (linker != null) {
return linker.toAbsolutePath().toString();
}
return null;
}
}

View File

@@ -0,0 +1,3 @@
# TODO: remove as soon as JIB supports concurrent installations
exclusiveAccess.dirs=.

View File

@@ -23,14 +23,18 @@
package compiler.aot.cli.jaotc;
import compiler.aot.AotCompiler;
import java.io.File;
import java.io.IOException;
import jdk.test.lib.process.ExitCode;
import jdk.test.lib.Platform;
import jdk.test.lib.JDKToolLauncher;
import jdk.test.lib.Utils;
import jdk.test.lib.cli.CommandLineOptionTest;
import jdk.test.lib.process.OutputAnalyzer;
import jdk.test.lib.process.ProcessTools;
public class JaotcTestHelper {
public static final String DEFAULT_LIB_PATH = "./unnamed." + Platform.sharedLibraryExt();
@@ -49,10 +53,15 @@ public class JaotcTestHelper {
for (String arg : args) {
launcher.addToolArg(arg);
}
String linker = AotCompiler.resolveLinker();
if (linker != null) {
launcher.addToolArg("--linker-path");
launcher.addToolArg(linker);
}
String[] cmd = launcher.getCommand();
try {
return new OutputAnalyzer(new ProcessBuilder(cmd).start());
} catch (IOException e) {
return ProcessTools.executeCommand(cmd);
} catch (Throwable e) {
throw new Error("Can't start test process: " + e, e);
}
}

View File

@@ -0,0 +1,49 @@
/*
* Copyright (c) 2002, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @bug 4755500
* @summary calling Math.round(NaN) can break subsequent calls to Math.round()
* @run main compiler.floatingpoint.TestRound
*/
package compiler.floatingpoint;
public class TestRound {
public static void main(String[] args) {
// Note: it's really only necessary to run this loop 8 times to
// reproduce the bug, but the 10000-length loop causes compilation
// of Math.round() without any other command-line flags.
// A bug in the d2l NaN case was causing overflow of the FPU
// stack, yielding subsequent wrong results for flds.
for (int i = 0; i < 10_000; i++) {
Math.round(Double.NaN);
}
if (Math.round(1d) != 1) {
throw new AssertionError("TEST FAILED");
}
System.out.println("Test passed.");
}
}

View File

@@ -40,6 +40,8 @@ import java.nio.file.Paths;
* @requires vm.gc.G1
* @requires vm.opt.G1HeapRegionSize == "null" | vm.opt.G1HeapRegionSize == "1M"
* @requires vm.opt.ExplicitGCInvokesConcurrent != true
* @requires vm.opt.ClassUnloading != false
* @requires vm.opt.ClassUnloadingWithConcurrentMark != false
* @library /test/lib /
* @modules java.base/jdk.internal.misc
* @modules java.management

View File

@@ -23,6 +23,9 @@
package utils;
import common.ToolResults;
import java.text.NumberFormat;
import java.text.ParseException;
/**
* Results of running the jstat tool Concrete subclasses will detail the jstat
@@ -55,7 +58,13 @@ abstract public class JstatResults extends ToolResults {
*/
public float getFloatValue(String name) {
int valueNdx = new StringOfValues(getStdoutLine(0)).getIndex(name);
return Float.valueOf(new StringOfValues(getStdoutLine(1)).getValue(valueNdx));
// Let the parsing use the current locale format.
try {
return NumberFormat.getInstance().parse(new StringOfValues(getStdoutLine(1)).getValue(valueNdx)).floatValue();
} catch (ParseException e) {
throw new NumberFormatException(e.getMessage());
}
}
/**
@@ -66,7 +75,11 @@ abstract public class JstatResults extends ToolResults {
*/
public int getIntValue(String name) {
int valueNdx = new StringOfValues(getStdoutLine(0)).getIndex(name);
return Integer.valueOf(new StringOfValues(getStdoutLine(1)).getValue(valueNdx));
try {
return NumberFormat.getInstance().parse(new StringOfValues(getStdoutLine(1)).getValue(valueNdx)).intValue();
} catch (ParseException e) {
throw new NumberFormatException(e.getMessage());
}
}
/**