David Dehaven 2015-07-14 15:26:34 -07:00
commit bf10dfe7fe
263 changed files with 7020 additions and 2172 deletions

View File

@ -314,3 +314,4 @@ ff3fc75f3214ad7e03595be1b0d0f38d887b6f0e jdk9-b66
d69c968463f0ae5d0b45de3fc14fe65171b23948 jdk9-b69
43d0179ee9de3bfffae3417f09e07eb6d8efc963 jdk9-b70
f66c185284727f6e6ffd27e9c45ed2dd9da0a691 jdk9-b71
61d2d0629b6dbf4c091dc86151ade1b3ef34fffe jdk9-b72

View File

@ -314,3 +314,4 @@ f546760134eb861fcfecd4ce611b0040b0d25a6a jdk9-b67
1bcfd6b8726582cff5a42dbfc75903e36f9dd4fe jdk9-b69
eed77fcd77711fcdba05f18fc22f37d86efb243c jdk9-b70
c706ef5ea5da00078dc5e4334660315f7d99c15b jdk9-b71
8582c35016fb6211b373810b6b172feccf9c483b jdk9-b72

View File

@ -314,3 +314,4 @@ afc1e295c4bf83f9a5dd539c29914edd4a754a3f jdk9-b65
de8acedcb5b5870f1dc54cba575aaa5d33897ea2 jdk9-b69
e7cf01990ed366bd493080663259281e91ce223b jdk9-b70
cd39ed501fb0504554a7f58ac6cf3dd2b64afec0 jdk9-b71
f9f3706bd24c42c07cb260fe05730a749b8e52f4 jdk9-b72

View File

@ -474,3 +474,4 @@ d47dfabd16d48eb96a451edd1b61194a39ee0eb5 jdk9-b67
ff0929a59ced0e144201aa05819ae2e47d6f2c61 jdk9-b69
8672e9264db30c21504063932dbc374eabc287a1 jdk9-b70
07c6b035d68b0c41b1dcd442157b50b41a2551e9 jdk9-b71
c1b2825ef47e75cb34dd18450d1c4280b7c5853c jdk9-b72

File diff suppressed because it is too large

View File

@ -2055,6 +2055,9 @@ public:
INSN(negr, 1, 0b100000101110);
INSN(notr, 1, 0b100000010110);
INSN(addv, 0, 0b110001101110);
INSN(cls, 0, 0b100000010010);
INSN(clz, 1, 0b100000010010);
INSN(cnt, 0, 0b100000010110);
#undef INSN

View File

@ -36,6 +36,7 @@
class MacroAssembler: public Assembler {
friend class LIR_Assembler;
public:
using Assembler::mov;
using Assembler::movi;

View File

@ -199,6 +199,12 @@ void VM_Version::get_processor_features() {
UseCRC32Intrinsics = true;
}
if (UseCRC32CIntrinsics) {
if (!FLAG_IS_DEFAULT(UseCRC32CIntrinsics))
warning("CRC32C intrinsics are not available on this CPU");
FLAG_SET_DEFAULT(UseCRC32CIntrinsics, false);
}
if (auxv & (HWCAP_SHA1 | HWCAP_SHA2)) {
if (FLAG_IS_DEFAULT(UseSHA)) {
FLAG_SET_DEFAULT(UseSHA, true);
@ -251,6 +257,10 @@ void VM_Version::get_processor_features() {
UseBarriersForVolatile = (_cpuFeatures & CPU_DMB_ATOMICS) != 0;
}
if (FLAG_IS_DEFAULT(UsePopCountInstruction)) {
UsePopCountInstruction = true;
}
#ifdef COMPILER2
if (FLAG_IS_DEFAULT(OptoScheduling)) {
OptoScheduling = true;

View File

@ -191,6 +191,13 @@ void VM_Version::initialize() {
FLAG_SET_DEFAULT(UseSHA256Intrinsics, false);
FLAG_SET_DEFAULT(UseSHA512Intrinsics, false);
}
if (UseCRC32CIntrinsics) {
if (!FLAG_IS_DEFAULT(UseCRC32CIntrinsics))
warning("CRC32C intrinsics are not available on this CPU");
FLAG_SET_DEFAULT(UseCRC32CIntrinsics, false);
}
// Adjust RTM (Restricted Transactional Memory) flags.
if (!has_tcheck() && UseRTMLocking) {
// Can't continue because UseRTMLocking affects UseBiasedLocking flag

View File

@ -128,8 +128,11 @@ class Assembler : public AbstractAssembler {
faligndata_op3 = 0x36,
flog3_op3 = 0x36,
edge_op3 = 0x36,
fzero_op3 = 0x36,
fsrc_op3 = 0x36,
fnot_op3 = 0x36,
xmulx_op3 = 0x36,
crc32c_op3 = 0x36,
impdep2_op3 = 0x37,
stpartialf_op3 = 0x37,
jmpl_op3 = 0x38,
@ -231,7 +234,9 @@ class Assembler : public AbstractAssembler {
sha1_opf = 0x141,
sha256_opf = 0x142,
sha512_opf = 0x143
sha512_opf = 0x143,
crc32c_opf = 0x147
};
enum op5s {
@ -600,6 +605,11 @@ class Assembler : public AbstractAssembler {
return x & ((1 << 10) - 1);
}
// create a low12 __value__ (not a field) for a given a 32-bit constant
static int low12( int x ) {
return x & ((1 << 12) - 1);
}
// AES crypto instructions supported only on certain processors
static void aes_only() { assert( VM_Version::has_aes(), "This instruction only works on SPARC with AES instructions support"); }
@ -608,6 +618,9 @@ class Assembler : public AbstractAssembler {
static void sha256_only() { assert( VM_Version::has_sha256(), "This instruction only works on SPARC with SHA256"); }
static void sha512_only() { assert( VM_Version::has_sha512(), "This instruction only works on SPARC with SHA512"); }
// CRC32C instruction supported only on certain processors
static void crc32c_only() { assert( VM_Version::has_crc32c(), "This instruction only works on SPARC with CRC32C"); }
// instruction only in VIS1
static void vis1_only() { assert( VM_Version::has_vis1(), "This instruction only works on SPARC with VIS1"); }
@ -1022,6 +1035,7 @@ public:
void nop() { emit_int32( op(branch_op) | op2(sethi_op2) ); }
void sw_count() { emit_int32( op(branch_op) | op2(sethi_op2) | 0x3f0 ); }
// pp 202
@ -1198,8 +1212,14 @@ public:
void faligndata( FloatRegister s1, FloatRegister s2, FloatRegister d ) { vis1_only(); emit_int32( op(arith_op) | fd(d, FloatRegisterImpl::D) | op3(faligndata_op3) | fs1(s1, FloatRegisterImpl::D) | opf(faligndata_opf) | fs2(s2, FloatRegisterImpl::D)); }
void fzero( FloatRegisterImpl::Width w, FloatRegister d ) { vis1_only(); emit_int32( op(arith_op) | fd(d, w) | op3(fzero_op3) | opf(0x62 - w)); }
void fsrc2( FloatRegisterImpl::Width w, FloatRegister s2, FloatRegister d ) { vis1_only(); emit_int32( op(arith_op) | fd(d, w) | op3(fsrc_op3) | opf(0x7A - w) | fs2(s2, w)); }
void fnot1( FloatRegisterImpl::Width w, FloatRegister s1, FloatRegister d ) { vis1_only(); emit_int32( op(arith_op) | fd(d, w) | op3(fnot_op3) | fs1(s1, w) | opf(0x6C - w)); }
void fpmerge( FloatRegister s1, FloatRegister s2, FloatRegister d ) { vis1_only(); emit_int32( op(arith_op) | fd(d, FloatRegisterImpl::D) | op3(0x36) | fs1(s1, FloatRegisterImpl::S) | opf(0x4b) | fs2(s2, FloatRegisterImpl::S)); }
void stpartialf( Register s1, Register s2, FloatRegister d, int ia = -1 ) { vis1_only(); emit_int32( op(ldst_op) | fd(d, FloatRegisterImpl::D) | op3(stpartialf_op3) | rs1(s1) | imm_asi(ia) | rs2(s2)); }
// VIS2 instructions
@ -1224,6 +1244,10 @@ public:
void sha256() { sha256_only(); emit_int32( op(arith_op) | op3(sha_op3) | opf(sha256_opf)); }
void sha512() { sha512_only(); emit_int32( op(arith_op) | op3(sha_op3) | opf(sha512_opf)); }
// CRC32C instruction
void crc32c( FloatRegister s1, FloatRegister s2, FloatRegister d ) { crc32c_only(); emit_int32( op(arith_op) | fd(d, FloatRegisterImpl::D) | op3(crc32c_op3) | fs1(s1, FloatRegisterImpl::D) | opf(crc32c_opf) | fs2(s2, FloatRegisterImpl::D)); }
// Creation
Assembler(CodeBuffer* code) : AbstractAssembler(code) {
#ifdef CHECK_DELAY

View File

@ -956,6 +956,7 @@ void MacroAssembler::set64(jlong value, Register d, Register tmp) {
int hi = (int)(value >> 32);
int lo = (int)(value & ~0);
int bits_33to2 = (int)((value >> 2) & ~0);
// (Matcher::isSimpleConstant64 knows about the following optimizations.)
if (Assembler::is_simm13(lo) && value == lo) {
or3(G0, lo, d);
@ -964,6 +965,12 @@ void MacroAssembler::set64(jlong value, Register d, Register tmp) {
if (low10(lo) != 0)
or3(d, low10(lo), d);
}
else if ((hi >> 2) == 0) {
Assembler::sethi(bits_33to2, d); // hardware version zero-extends to upper 32
sllx(d, 2, d);
if (low12(lo) != 0)
or3(d, low12(lo), d);
}
else if (hi == -1) {
Assembler::sethi(~lo, d); // hardware version zero-extends to upper 32
xor3(d, low10(lo) ^ ~low10(~0), d);
@ -4351,3 +4358,52 @@ void MacroAssembler::bis_zeroing(Register to, Register count, Register temp, Lab
cmp_and_brx_short(to, end, Assembler::lessUnsigned, Assembler::pt, small_loop);
nop(); // Separate short branches
}
/**
* Update CRC-32[C] with a byte value according to constants in table
*
* @param [in,out]crc Register containing the crc.
* @param [in]val Register containing the byte to fold into the CRC.
* @param [in]table Register containing the table of crc constants.
*
* uint32_t crc;
* val = crc_table[(val ^ crc) & 0xFF];
* crc = val ^ (crc >> 8);
*/
void MacroAssembler::update_byte_crc32(Register crc, Register val, Register table) {
xor3(val, crc, val);
and3(val, 0xFF, val);
sllx(val, 2, val);
lduw(table, val, val);
srlx(crc, 8, crc);
xor3(val, crc, crc);
}
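The routine above folds one byte into the running CRC via the 256-entry constant table (the sllx by 2 scales the index to 4-byte table entries). For reference, the same recurrence as an illustrative Java sketch, assuming a byteTable array of the precomputed CRC32C constants (not the JDK implementation itself):
// Sketch of the table-driven per-byte CRC32C update emitted above;
// 'byteTable' is assumed to hold the 256 precomputed constants.
static int updateByteCRC32C(int crc, byte b, int[] byteTable) {
    return byteTable[(crc ^ b) & 0xFF] ^ (crc >>> 8);
}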
// Reverse byte order of lower 32 bits, assuming upper 32 bits all zeros
void MacroAssembler::reverse_bytes_32(Register src, Register dst, Register tmp) {
srlx(src, 24, dst);
sllx(src, 32+8, tmp);
srlx(tmp, 32+24, tmp);
sllx(tmp, 8, tmp);
or3(dst, tmp, dst);
sllx(src, 32+16, tmp);
srlx(tmp, 32+24, tmp);
sllx(tmp, 16, tmp);
or3(dst, tmp, dst);
sllx(src, 32+24, tmp);
srlx(tmp, 32, tmp);
or3(dst, tmp, dst);
}
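The shift/or sequence above byte-swaps the low 32 bits under the stated assumption that the upper 32 bits are zero. A minimal Java equivalent, for comparison only:
// Sketch: byte-swap the low 32 bits of a value whose upper 32 bits are zero,
// matching the effect of reverse_bytes_32 above.
static long reverseBytes32(long src) {
    return Integer.toUnsignedLong(Integer.reverseBytes((int) src));
}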
void MacroAssembler::movitof_revbytes(Register src, FloatRegister dst, Register tmp1, Register tmp2) {
reverse_bytes_32(src, tmp1, tmp2);
movxtod(tmp1, dst);
}
void MacroAssembler::movftoi_revbytes(FloatRegister src, Register dst, Register tmp1, Register tmp2) {
movdtox(src, tmp1);
reverse_bytes_32(tmp1, dst, tmp2);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -903,6 +903,10 @@ public:
inline void ldf(FloatRegisterImpl::Width w, Register s1, RegisterOrConstant s2, FloatRegister d);
inline void ldf(FloatRegisterImpl::Width w, const Address& a, FloatRegister d, int offset = 0);
// little-endian
inline void ldxl(Register s1, Register s2, Register d) { ldxa(s1, s2, ASI_PRIMARY_LITTLE, d); }
inline void ldfl(FloatRegisterImpl::Width w, Register s1, Register s2, FloatRegister d) { ldfa(w, s1, s2, ASI_PRIMARY_LITTLE, d); }
// membar psuedo instruction. takes into account target memory model.
inline void membar( Assembler::Membar_mask_bits const7a );
@ -1436,6 +1440,14 @@ public:
// Use BIS for zeroing
void bis_zeroing(Register to, Register count, Register temp, Label& Ldone);
// Update CRC-32[C] with a byte value according to constants in table
void update_byte_crc32(Register crc, Register val, Register table);
// Reverse byte order of lower 32 bits, assuming upper 32 bits all zeros
void reverse_bytes_32(Register src, Register dst, Register tmp);
void movitof_revbytes(Register src, FloatRegister dst, Register tmp1, Register tmp2);
void movftoi_revbytes(FloatRegister src, Register dst, Register tmp1, Register tmp2);
#undef VIRTUAL
};

View File

@ -4910,6 +4910,206 @@ class StubGenerator: public StubCodeGenerator {
return start;
}
#define CHUNK_LEN 128 /* 128 x 8B = 1KB */
#define CHUNK_K1 0x1307a0206 /* reverseBits(pow(x, CHUNK_LEN*8*8*3 - 32) mod P(x)) << 1 */
#define CHUNK_K2 0x1a0f717c4 /* reverseBits(pow(x, CHUNK_LEN*8*8*2 - 32) mod P(x)) << 1 */
#define CHUNK_K3 0x0170076fa /* reverseBits(pow(x, CHUNK_LEN*8*8*1 - 32) mod P(x)) << 1 */
/**
* Arguments:
*
* Inputs:
* O0 - int crc
* O1 - byte* buf
* O2 - int len
* O3 - int* table
*
* Output:
* O0 - int crc result
*/
address generate_updateBytesCRC32C() {
assert(UseCRC32CIntrinsics, "need CRC32C instruction");
__ align(CodeEntryAlignment);
StubCodeMark mark(this, "StubRoutines", "updateBytesCRC32C");
address start = __ pc();
const Register crc = O0; // crc
const Register buf = O1; // source java byte array address
const Register len = O2; // number of bytes
const Register table = O3; // byteTable
Label L_crc32c_head, L_crc32c_aligned;
Label L_crc32c_parallel, L_crc32c_parallel_loop;
Label L_crc32c_serial, L_crc32c_x32_loop, L_crc32c_x8, L_crc32c_x8_loop;
Label L_crc32c_done, L_crc32c_tail, L_crc32c_return;
__ cmp_and_br_short(len, 0, Assembler::lessEqual, Assembler::pn, L_crc32c_return);
// clear upper 32 bits of crc
__ clruwu(crc);
__ and3(buf, 7, G4);
__ cmp_and_brx_short(G4, 0, Assembler::equal, Assembler::pt, L_crc32c_aligned);
__ mov(8, G1);
__ sub(G1, G4, G4);
// ------ process the misaligned head (7 bytes or less) ------
__ BIND(L_crc32c_head);
// crc = (crc >>> 8) ^ byteTable[(crc ^ b) & 0xFF];
__ ldub(buf, 0, G1);
__ update_byte_crc32(crc, G1, table);
__ inc(buf);
__ dec(len);
__ cmp_and_br_short(len, 0, Assembler::equal, Assembler::pn, L_crc32c_return);
__ dec(G4);
__ cmp_and_br_short(G4, 0, Assembler::greater, Assembler::pt, L_crc32c_head);
// ------ process the 8-byte-aligned body ------
__ BIND(L_crc32c_aligned);
__ nop();
__ cmp_and_br_short(len, 8, Assembler::less, Assembler::pn, L_crc32c_tail);
// reverse the byte order of lower 32 bits to big endian, and move to FP side
__ movitof_revbytes(crc, F0, G1, G3);
__ set(CHUNK_LEN*8*4, G4);
__ cmp_and_br_short(len, G4, Assembler::less, Assembler::pt, L_crc32c_serial);
// ------ process four 1KB chunks in parallel ------
__ BIND(L_crc32c_parallel);
__ fzero(FloatRegisterImpl::D, F2);
__ fzero(FloatRegisterImpl::D, F4);
__ fzero(FloatRegisterImpl::D, F6);
__ mov(CHUNK_LEN - 1, G4);
__ BIND(L_crc32c_parallel_loop);
// schedule ldf's ahead of crc32c's to hide the load-use latency
__ ldf(FloatRegisterImpl::D, buf, 0, F8);
__ ldf(FloatRegisterImpl::D, buf, CHUNK_LEN*8, F10);
__ ldf(FloatRegisterImpl::D, buf, CHUNK_LEN*16, F12);
__ ldf(FloatRegisterImpl::D, buf, CHUNK_LEN*24, F14);
__ crc32c(F0, F8, F0);
__ crc32c(F2, F10, F2);
__ crc32c(F4, F12, F4);
__ crc32c(F6, F14, F6);
__ inc(buf, 8);
__ dec(G4);
__ cmp_and_br_short(G4, 0, Assembler::greater, Assembler::pt, L_crc32c_parallel_loop);
__ ldf(FloatRegisterImpl::D, buf, 0, F8);
__ ldf(FloatRegisterImpl::D, buf, CHUNK_LEN*8, F10);
__ ldf(FloatRegisterImpl::D, buf, CHUNK_LEN*16, F12);
__ crc32c(F0, F8, F0);
__ crc32c(F2, F10, F2);
__ crc32c(F4, F12, F4);
__ inc(buf, CHUNK_LEN*24);
__ ldfl(FloatRegisterImpl::D, buf, G0, F14); // load in little endian
__ inc(buf, 8);
__ prefetch(buf, 0, Assembler::severalReads);
__ prefetch(buf, CHUNK_LEN*8, Assembler::severalReads);
__ prefetch(buf, CHUNK_LEN*16, Assembler::severalReads);
__ prefetch(buf, CHUNK_LEN*24, Assembler::severalReads);
// move to INT side, and reverse the byte order of lower 32 bits to little endian
__ movftoi_revbytes(F0, O4, G1, G4);
__ movftoi_revbytes(F2, O5, G1, G4);
__ movftoi_revbytes(F4, G5, G1, G4);
// combine the results of 4 chunks
__ set64(CHUNK_K1, G3, G1);
__ xmulx(O4, G3, O4);
__ set64(CHUNK_K2, G3, G1);
__ xmulx(O5, G3, O5);
__ set64(CHUNK_K3, G3, G1);
__ xmulx(G5, G3, G5);
__ movdtox(F14, G4);
__ xor3(O4, O5, O5);
__ xor3(G5, O5, O5);
__ xor3(G4, O5, O5);
// reverse the byte order to big endian, via stack, and move to FP side
__ add(SP, -8, G1);
__ srlx(G1, 3, G1);
__ sllx(G1, 3, G1);
__ stx(O5, G1, G0);
__ ldfl(FloatRegisterImpl::D, G1, G0, F2); // load in little endian
__ crc32c(F6, F2, F0);
__ set(CHUNK_LEN*8*4, G4);
__ sub(len, G4, len);
__ cmp_and_br_short(len, G4, Assembler::greaterEqual, Assembler::pt, L_crc32c_parallel);
__ nop();
__ cmp_and_br_short(len, 0, Assembler::equal, Assembler::pt, L_crc32c_done);
__ BIND(L_crc32c_serial);
__ mov(32, G4);
__ cmp_and_br_short(len, G4, Assembler::less, Assembler::pn, L_crc32c_x8);
// ------ process 32B chunks ------
__ BIND(L_crc32c_x32_loop);
__ ldf(FloatRegisterImpl::D, buf, 0, F2);
__ inc(buf, 8);
__ crc32c(F0, F2, F0);
__ ldf(FloatRegisterImpl::D, buf, 0, F2);
__ inc(buf, 8);
__ crc32c(F0, F2, F0);
__ ldf(FloatRegisterImpl::D, buf, 0, F2);
__ inc(buf, 8);
__ crc32c(F0, F2, F0);
__ ldf(FloatRegisterImpl::D, buf, 0, F2);
__ inc(buf, 8);
__ crc32c(F0, F2, F0);
__ dec(len, 32);
__ cmp_and_br_short(len, G4, Assembler::greaterEqual, Assembler::pt, L_crc32c_x32_loop);
__ BIND(L_crc32c_x8);
__ nop();
__ cmp_and_br_short(len, 8, Assembler::less, Assembler::pt, L_crc32c_done);
// ------ process 8B chunks ------
__ BIND(L_crc32c_x8_loop);
__ ldf(FloatRegisterImpl::D, buf, 0, F2);
__ inc(buf, 8);
__ crc32c(F0, F2, F0);
__ dec(len, 8);
__ cmp_and_br_short(len, 8, Assembler::greaterEqual, Assembler::pt, L_crc32c_x8_loop);
__ BIND(L_crc32c_done);
// move to INT side, and reverse the byte order of lower 32 bits to little endian
__ movftoi_revbytes(F0, crc, G1, G3);
__ cmp_and_br_short(len, 0, Assembler::equal, Assembler::pt, L_crc32c_return);
// ------ process the misaligned tail (7 bytes or less) ------
__ BIND(L_crc32c_tail);
// crc = (crc >>> 8) ^ byteTable[(crc ^ b) & 0xFF];
__ ldub(buf, 0, G1);
__ update_byte_crc32(crc, G1, table);
__ inc(buf);
__ dec(len);
__ cmp_and_br_short(len, 0, Assembler::greater, Assembler::pt, L_crc32c_tail);
__ BIND(L_crc32c_return);
__ nop();
__ retl();
__ delayed()->nop();
return start;
}
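For orientation, the stub generated above decomposes into a byte-table head for misaligned leading bytes, an 8-byte-aligned body driven by the crc32c instruction (with a 32-byte loop and a four-way 1KB-chunk path whose partial results are combined via xmulx for large inputs), and a byte-table tail. A rough Java sketch of that decomposition, with assumed names and a software stand-in for the hardware instruction:
// Illustrative skeleton only; 'tab' is the assumed 256-entry CRC32C constant
// table. The real stub uses the SPARC crc32c instruction plus the unrolled
// 32B and 4x1KB paths; crc32c8Bytes below is just a software stand-in.
static int crc32c8Bytes(int crc, byte[] buf, int i, int[] tab) {
    for (int k = 0; k < 8; k++) {
        crc = tab[(crc ^ buf[i + k]) & 0xFF] ^ (crc >>> 8);
    }
    return crc;
}
static int updateBytesSketch(int crc, byte[] buf, int off, int len, int[] tab) {
    int i = off;
    // misaligned head, up to 7 bytes (L_crc32c_head); the stub aligns on the
    // byte address, the array index is used here only as a stand-in
    while (len > 0 && (i & 7) != 0) {
        crc = tab[(crc ^ buf[i++]) & 0xFF] ^ (crc >>> 8);
        len--;
    }
    // 8-byte body (L_crc32c_x8_loop and the faster 32B / parallel-chunk loops)
    while (len >= 8) {
        crc = crc32c8Bytes(crc, buf, i, tab);
        i += 8;
        len -= 8;
    }
    // misaligned tail, up to 7 bytes (L_crc32c_tail)
    while (len > 0) {
        crc = tab[(crc ^ buf[i++]) & 0xFF] ^ (crc >>> 8);
        len--;
    }
    return crc;
}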
void generate_initial() {
// Generates all stubs and initializes the entry points
@ -5001,6 +5201,11 @@ class StubGenerator: public StubCodeGenerator {
StubRoutines::_sha512_implCompress = generate_sha512_implCompress(false, "sha512_implCompress");
StubRoutines::_sha512_implCompressMB = generate_sha512_implCompress(true, "sha512_implCompressMB");
}
// generate CRC32C intrinsic code
if (UseCRC32CIntrinsics) {
StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C();
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -41,7 +41,7 @@ static bool returns_to_call_stub(address return_pc) {
enum /* platform_dependent_constants */ {
// %%%%%%%% May be able to shrink this a lot
code_size1 = 20000, // simply increase if too small (assembler will crash if too small)
code_size2 = 23000 // simply increase if too small (assembler will crash if too small)
code_size2 = 24000 // simply increase if too small (assembler will crash if too small)
};
class Sparc {

View File

@ -230,7 +230,7 @@ void VM_Version::initialize() {
assert((OptoLoopAlignment % relocInfo::addr_unit()) == 0, "alignment is not a multiple of NOP size");
char buf[512];
jio_snprintf(buf, sizeof(buf), "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s",
jio_snprintf(buf, sizeof(buf), "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s",
(has_v9() ? ", v9" : (has_v8() ? ", v8" : "")),
(has_hardware_popc() ? ", popc" : ""),
(has_vis1() ? ", vis1" : ""),
@ -242,6 +242,7 @@ void VM_Version::initialize() {
(has_sha1() ? ", sha1" : ""),
(has_sha256() ? ", sha256" : ""),
(has_sha512() ? ", sha512" : ""),
(has_crc32c() ? ", crc32c" : ""),
(is_ultra3() ? ", ultra3" : ""),
(is_sun4v() ? ", sun4v" : ""),
(is_niagara_plus() ? ", niagara_plus" : (is_niagara() ? ", niagara" : "")),
@ -363,6 +364,23 @@ void VM_Version::initialize() {
}
}
// SPARC T4 and above should have support for CRC32C instruction
if (has_crc32c()) {
if (UseVIS > 2) { // CRC32C intrinsics use VIS3 instructions
if (FLAG_IS_DEFAULT(UseCRC32CIntrinsics)) {
FLAG_SET_DEFAULT(UseCRC32CIntrinsics, true);
}
} else {
if (UseCRC32CIntrinsics) {
warning("SPARC CRC32C intrinsics require VIS3 instruction support. Intrinsics will be disabled.");
FLAG_SET_DEFAULT(UseCRC32CIntrinsics, false);
}
}
} else if (UseCRC32CIntrinsics) {
warning("CRC32C instruction is not available on this CPU");
FLAG_SET_DEFAULT(UseCRC32CIntrinsics, false);
}
if (FLAG_IS_DEFAULT(ContendedPaddingWidth) &&
(cache_line_size > ContendedPaddingWidth))
ContendedPaddingWidth = cache_line_size;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -53,7 +53,8 @@ protected:
aes_instructions = 19,
sha1_instruction = 20,
sha256_instruction = 21,
sha512_instruction = 22
sha512_instruction = 22,
crc32c_instruction = 23
};
enum Feature_Flag_Set {
@ -83,6 +84,7 @@ protected:
sha1_instruction_m = 1 << sha1_instruction,
sha256_instruction_m = 1 << sha256_instruction,
sha512_instruction_m = 1 << sha512_instruction,
crc32c_instruction_m = 1 << crc32c_instruction,
generic_v8_m = v8_instructions_m | hardware_mul32_m | hardware_div32_m | hardware_fsmuld_m,
generic_v9_m = generic_v8_m | v9_instructions_m,
@ -141,6 +143,7 @@ public:
static bool has_sha1() { return (_features & sha1_instruction_m) != 0; }
static bool has_sha256() { return (_features & sha256_instruction_m) != 0; }
static bool has_sha512() { return (_features & sha512_instruction_m) != 0; }
static bool has_crc32c() { return (_features & crc32c_instruction_m) != 0; }
static bool supports_compare_and_exchange()
{ return has_v9(); }

View File

@ -699,6 +699,12 @@ void VM_Version::get_processor_features() {
FLAG_SET_DEFAULT(UseSHA512Intrinsics, false);
}
if (UseCRC32CIntrinsics) {
if (!FLAG_IS_DEFAULT(UseCRC32CIntrinsics))
warning("CRC32C intrinsics are not available on this CPU");
FLAG_SET_DEFAULT(UseCRC32CIntrinsics, false);
}
// Adjust RTM (Restricted Transactional Memory) flags
if (!supports_rtm() && UseRTMLocking) {
// Can't continue because UseRTMLocking affects UseBiasedLocking flag

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2006, 2014, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2006, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -363,6 +363,11 @@ int VM_Version::platform_features(int features) {
#endif
if (av & AV_SPARC_CBCOND) features |= cbcond_instructions_m;
#ifndef AV_SPARC_CRC32C
#define AV_SPARC_CRC32C 0x20000000 /* crc32c instruction supported */
#endif
if (av & AV_SPARC_CRC32C) features |= crc32c_instruction_m;
#ifndef AV_SPARC_AES
#define AV_SPARC_AES 0x00020000 /* aes instrs supported */
#endif

View File

@ -186,6 +186,10 @@ static bool trust_final_non_static_fields(ciInstanceKlass* holder) {
// Even if general trusting is disabled, trust system-built closures in these packages.
if (holder->is_in_package("java/lang/invoke") || holder->is_in_package("sun/invoke"))
return true;
// Trust VM anonymous classes. They are private API (sun.misc.Unsafe) and can't be serialized,
// so there is no hacking of finals going on with them.
if (holder->is_anonymous())
return true;
return TrustFinalNonStaticFields;
}

View File

@ -58,6 +58,7 @@ ciInstanceKlass::ciInstanceKlass(KlassHandle h_k) :
_nonstatic_field_size = ik->nonstatic_field_size();
_has_nonstatic_fields = ik->has_nonstatic_fields();
_has_default_methods = ik->has_default_methods();
_is_anonymous = ik->is_anonymous();
_nonstatic_fields = NULL; // initialized lazily by compute_nonstatic_fields:
_has_injected_fields = -1;
_implementor = NULL; // we will fill these lazily
@ -101,6 +102,7 @@ ciInstanceKlass::ciInstanceKlass(ciSymbol* name,
_has_nonstatic_fields = false;
_nonstatic_fields = NULL;
_has_injected_fields = -1;
_is_anonymous = false;
_loader = loader;
_protection_domain = protection_domain;
_is_shared = false;

View File

@ -53,6 +53,7 @@ private:
bool _has_subklass;
bool _has_nonstatic_fields;
bool _has_default_methods;
bool _is_anonymous;
ciFlags _flags;
jint _nonstatic_field_size;
@ -179,6 +180,10 @@ public:
return _has_default_methods;
}
bool is_anonymous() {
return _is_anonymous;
}
ciInstanceKlass* get_canonical_holder(int offset);
ciField* get_field_by_offset(int field_offset, bool is_static);
ciField* get_field_by_name(ciSymbol* name, ciSymbol* signature, bool is_static);

View File

@ -863,6 +863,12 @@
do_name( updateByteBuffer_name, "updateByteBuffer") \
do_signature(updateByteBuffer_signature, "(IJII)I") \
\
/* support for java.util.zip.CRC32C */ \
do_class(java_util_zip_CRC32C, "java/util/zip/CRC32C") \
do_intrinsic(_updateBytesCRC32C, java_util_zip_CRC32C, updateBytes_name, updateBytes_signature, F_S) \
do_intrinsic(_updateDirectByteBufferCRC32C, java_util_zip_CRC32C, updateDirectByteBuffer_name, updateByteBuffer_signature, F_S) \
do_name( updateDirectByteBuffer_name, "updateDirectByteBuffer") \
\
/* support for sun.misc.Unsafe */ \
do_class(sun_misc_Unsafe, "sun/misc/Unsafe") \
\
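These entries hook the intrinsics up to the new java.util.zip.CRC32C class in JDK 9. A small example of the public API whose updateBytes/updateDirectByteBuffer hot paths the intrinsics replace:
import java.util.zip.CRC32C;
// Computes a CRC-32C checksum over a byte[]; with -XX:+UseCRC32CIntrinsics on
// supported CPUs the update() hot path is served by the generated stub.
public class Crc32cExample {
    public static void main(String[] args) {
        byte[] data = "hello, world".getBytes();
        CRC32C crc = new CRC32C();
        crc.update(data, 0, data.length);
        System.out.printf("crc32c = %08x%n", crc.getValue());
    }
}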

View File

@ -962,6 +962,7 @@ void ConnectionGraph::process_call_arguments(CallNode *call) {
(strcmp(call->as_CallLeaf()->_name, "g1_wb_pre") == 0 ||
strcmp(call->as_CallLeaf()->_name, "g1_wb_post") == 0 ||
strcmp(call->as_CallLeaf()->_name, "updateBytesCRC32") == 0 ||
strcmp(call->as_CallLeaf()->_name, "updateBytesCRC32C") == 0 ||
strcmp(call->as_CallLeaf()->_name, "aescrypt_encryptBlock") == 0 ||
strcmp(call->as_CallLeaf()->_name, "aescrypt_decryptBlock") == 0 ||
strcmp(call->as_CallLeaf()->_name, "cipherBlockChaining_encryptAESCrypt") == 0 ||

View File

@ -197,7 +197,7 @@ class LibraryCallKit : public GraphKit {
CallJavaNode* generate_method_call_virtual(vmIntrinsics::ID method_id) {
return generate_method_call(method_id, true, false);
}
Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static);
Node * load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString, bool is_exact, bool is_static, ciInstanceKlass * fromKls);
Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2);
Node* make_string_method_node(int opcode, Node* str1, Node* str2);
@ -291,6 +291,9 @@ class LibraryCallKit : public GraphKit {
bool inline_updateCRC32();
bool inline_updateBytesCRC32();
bool inline_updateByteBufferCRC32();
Node* get_table_from_crc32c_class(ciInstanceKlass *crc32c_class);
bool inline_updateBytesCRC32C();
bool inline_updateDirectByteBufferCRC32C();
bool inline_multiplyToLen();
bool inline_squareToLen();
bool inline_mulAdd();
@ -539,6 +542,11 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
if (!UseCRC32Intrinsics) return NULL;
break;
case vmIntrinsics::_updateBytesCRC32C:
case vmIntrinsics::_updateDirectByteBufferCRC32C:
if (!UseCRC32CIntrinsics) return NULL;
break;
case vmIntrinsics::_incrementExactI:
case vmIntrinsics::_addExactI:
if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
@ -947,6 +955,11 @@ bool LibraryCallKit::try_to_inline(int predicate) {
case vmIntrinsics::_updateByteBufferCRC32:
return inline_updateByteBufferCRC32();
case vmIntrinsics::_updateBytesCRC32C:
return inline_updateBytesCRC32C();
case vmIntrinsics::_updateDirectByteBufferCRC32C:
return inline_updateDirectByteBufferCRC32C();
case vmIntrinsics::_profileBoolean:
return inline_profileBoolean();
case vmIntrinsics::_isCompileConstant:
@ -5536,6 +5549,106 @@ bool LibraryCallKit::inline_updateByteBufferCRC32() {
return true;
}
//------------------------------get_table_from_crc32c_class-----------------------
Node * LibraryCallKit::get_table_from_crc32c_class(ciInstanceKlass *crc32c_class) {
Node* table = load_field_from_object(NULL, "byteTable", "[I", /*is_exact*/ false, /*is_static*/ true, crc32c_class);
assert (table != NULL, "wrong version of java.util.zip.CRC32C");
return table;
}
//------------------------------inline_updateBytesCRC32C-----------------------
//
// Calculate CRC32C for byte[] array.
// int java.util.zip.CRC32C.updateBytes(int crc, byte[] buf, int off, int end)
//
bool LibraryCallKit::inline_updateBytesCRC32C() {
assert(UseCRC32CIntrinsics, "need CRC32C instruction support");
assert(callee()->signature()->size() == 4, "updateBytes has 4 parameters");
assert(callee()->holder()->is_loaded(), "CRC32C class must be loaded");
// no receiver since it is a static method
Node* crc = argument(0); // type: int
Node* src = argument(1); // type: oop
Node* offset = argument(2); // type: int
Node* end = argument(3); // type: int
Node* length = _gvn.transform(new SubINode(end, offset));
const Type* src_type = src->Value(&_gvn);
const TypeAryPtr* top_src = src_type->isa_aryptr();
if (top_src == NULL || top_src->klass() == NULL) {
// failed array check
return false;
}
// Figure out the size and type of the elements we will be copying.
BasicType src_elem = src_type->isa_aryptr()->klass()->as_array_klass()->element_type()->basic_type();
if (src_elem != T_BYTE) {
return false;
}
// 'src_start' points to src array + scaled offset
Node* src_start = array_element_address(src, offset, src_elem);
// static final int[] byteTable in class CRC32C
Node* table = get_table_from_crc32c_class(callee()->holder());
Node* table_start = array_element_address(table, intcon(0), T_INT);
// We assume that range check is done by caller.
// TODO: generate range check (offset+length < src.length) in debug VM.
// Call the stub.
address stubAddr = StubRoutines::updateBytesCRC32C();
const char *stubName = "updateBytesCRC32C";
Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesCRC32C_Type(),
stubAddr, stubName, TypePtr::BOTTOM,
crc, src_start, length, table_start);
Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
set_result(result);
return true;
}
//------------------------------inline_updateDirectByteBufferCRC32C-----------------------
//
// Calculate CRC32C for DirectByteBuffer.
// int java.util.zip.CRC32C.updateDirectByteBuffer(int crc, long buf, int off, int end)
//
bool LibraryCallKit::inline_updateDirectByteBufferCRC32C() {
assert(UseCRC32CIntrinsics, "need CRC32C instruction support");
assert(callee()->signature()->size() == 5, "updateDirectByteBuffer has 4 parameters and one is long");
assert(callee()->holder()->is_loaded(), "CRC32C class must be loaded");
// no receiver since it is a static method
Node* crc = argument(0); // type: int
Node* src = argument(1); // type: long
Node* offset = argument(3); // type: int
Node* end = argument(4); // type: int
Node* length = _gvn.transform(new SubINode(end, offset));
src = ConvL2X(src); // adjust Java long to machine word
Node* base = _gvn.transform(new CastX2PNode(src));
offset = ConvI2X(offset);
// 'src_start' points to src array + scaled offset
Node* src_start = basic_plus_adr(top(), base, offset);
// static final int[] byteTable in class CRC32C
Node* table = get_table_from_crc32c_class(callee()->holder());
Node* table_start = array_element_address(table, intcon(0), T_INT);
// Call the stub.
address stubAddr = StubRoutines::updateBytesCRC32C();
const char *stubName = "updateBytesCRC32C";
Node* call = make_runtime_call(RC_LEAF, OptoRuntime::updateBytesCRC32C_Type(),
stubAddr, stubName, TypePtr::BOTTOM,
crc, src_start, length, table_start);
Node* result = _gvn.transform(new ProjNode(call, TypeFunc::Parms));
set_result(result);
return true;
}
//----------------------------inline_reference_get----------------------------
// public T java.lang.ref.Reference.get();
bool LibraryCallKit::inline_reference_get() {
@ -5571,18 +5684,28 @@ bool LibraryCallKit::inline_reference_get() {
Node * LibraryCallKit::load_field_from_object(Node * fromObj, const char * fieldName, const char * fieldTypeString,
bool is_exact=true, bool is_static=false) {
bool is_exact=true, bool is_static=false,
ciInstanceKlass * fromKls=NULL) {
if (fromKls == NULL) {
const TypeInstPtr* tinst = _gvn.type(fromObj)->isa_instptr();
assert(tinst != NULL, "obj is null");
assert(tinst->klass()->is_loaded(), "obj is not loaded");
assert(!is_exact || tinst->klass_is_exact(), "klass not exact");
fromKls = tinst->klass()->as_instance_klass();
} else {
assert(is_static, "only for static field access");
}
ciField* field = fromKls->get_field_by_name(ciSymbol::make(fieldName),
ciSymbol::make(fieldTypeString),
is_static);
const TypeInstPtr* tinst = _gvn.type(fromObj)->isa_instptr();
assert(tinst != NULL, "obj is null");
assert(tinst->klass()->is_loaded(), "obj is not loaded");
assert(!is_exact || tinst->klass_is_exact(), "klass not exact");
ciField* field = tinst->klass()->as_instance_klass()->get_field_by_name(ciSymbol::make(fieldName),
ciSymbol::make(fieldTypeString),
is_static);
if (field == NULL) return (Node *) NULL;
assert (field != NULL, "undefined field");
if (field == NULL) return (Node *) NULL;
if (is_static) {
const TypeInstPtr* tip = TypeInstPtr::make(fromKls->java_mirror());
fromObj = makecon(tip);
}
// Next code copied from Parse::do_get_xxx():

View File

@ -851,6 +851,29 @@ const TypeFunc* OptoRuntime::updateBytesCRC32_Type() {
return TypeFunc::make(domain, range);
}
/**
* int updateBytesCRC32C(int crc, byte* buf, int len, int* table)
*/
const TypeFunc* OptoRuntime::updateBytesCRC32C_Type() {
// create input type (domain)
int num_args = 4;
int argcnt = num_args;
const Type** fields = TypeTuple::fields(argcnt);
int argp = TypeFunc::Parms;
fields[argp++] = TypeInt::INT; // crc
fields[argp++] = TypePtr::NOTNULL; // buf
fields[argp++] = TypeInt::INT; // len
fields[argp++] = TypePtr::NOTNULL; // table
assert(argp == TypeFunc::Parms+argcnt, "correct decoding");
const TypeTuple* domain = TypeTuple::make(TypeFunc::Parms+argcnt, fields);
// result type needed
fields = TypeTuple::fields(1);
fields[TypeFunc::Parms+0] = TypeInt::INT; // crc result
const TypeTuple* range = TypeTuple::make(TypeFunc::Parms+1, fields);
return TypeFunc::make(domain, range);
}
// for cipherBlockChaining calls of aescrypt encrypt/decrypt, four pointers and a length, returning int
const TypeFunc* OptoRuntime::cipherBlockChaining_aescrypt_Type() {
// create input type (domain)

View File

@ -319,6 +319,7 @@ private:
static const TypeFunc* ghash_processBlocks_Type();
static const TypeFunc* updateBytesCRC32_Type();
static const TypeFunc* updateBytesCRC32C_Type();
// leaf on stack replacement interpreter accessor types
static const TypeFunc* osr_end_Type();

View File

@ -183,13 +183,20 @@ void SuperWord::unrolling_analysis(CountedLoopNode *cl, int &local_loop_unroll_f
break;
}
// Ignore nodes with non-primitive type.
BasicType bt;
if (n->is_Mem()) {
bt = n->as_Mem()->memory_type();
} else {
bt = n->bottom_type()->basic_type();
}
if (is_java_primitive(bt) == false) {
ignored_loop_nodes[i] = n->_idx;
continue;
}
if (n->is_Mem()) {
MemNode* current = n->as_Mem();
BasicType bt = current->memory_type();
if (is_java_primitive(bt) == false) {
ignored_loop_nodes[i] = n->_idx;
continue;
}
Node* adr = n->in(MemNode::Address);
Node* n_ctrl = _phase->get_ctrl(adr);
@ -231,11 +238,12 @@ void SuperWord::unrolling_analysis(CountedLoopNode *cl, int &local_loop_unroll_f
BasicType bt;
Node* n = lpt()->_body.at(i);
if (n->is_Store()) {
if (n->is_Mem()) {
bt = n->as_Mem()->memory_type();
} else {
bt = n->bottom_type()->basic_type();
}
if (is_java_primitive(bt) == false) continue;
int cur_max_vector = Matcher::max_vector_size(bt);

View File

@ -3753,8 +3753,12 @@ jint Arguments::apply_ergo() {
if (TieredCompilation) {
set_tiered_flags();
} else {
// Check if the policy is valid. Policies 0 and 1 are valid for non-tiered setup.
if (CompilationPolicyChoice >= 2) {
int max_compilation_policy_choice = 1;
#ifdef COMPILER2
max_compilation_policy_choice = 2;
#endif
// Check if the policy is valid.
if (CompilationPolicyChoice >= max_compilation_policy_choice) {
vm_exit_during_initialization(
"Incompatible compilation policy selected", NULL);
}

View File

@ -512,7 +512,7 @@ void StackWalkCompPolicy::method_invocation_event(methodHandle m, JavaThread* th
RegisterMap reg_map(thread, false);
javaVFrame* triggerVF = thread->last_java_vframe(&reg_map);
// triggerVF is the frame that triggered its counter
RFrame* first = new InterpretedRFrame(triggerVF->fr(), thread, m);
RFrame* first = new InterpretedRFrame(triggerVF->fr(), thread, m());
if (first->top_method()->code() != NULL) {
// called obsolete method/nmethod -- no need to recompile
@ -557,8 +557,8 @@ RFrame* StackWalkCompPolicy::findTopInlinableFrame(GrowableArray<RFrame*>* stack
if( !next ) // No next frame up the stack?
break; // Then compile with current frame
methodHandle m = current->top_method();
methodHandle next_m = next->top_method();
Method* m = current->top_method();
Method* next_m = next->top_method();
if (TraceCompilationPolicy && Verbose) {
tty->print("[caller: ");
@ -644,7 +644,7 @@ RFrame* StackWalkCompPolicy::findTopInlinableFrame(GrowableArray<RFrame*>* stack
if (TraceCompilationPolicy && Verbose) {
tty->print("\n\t check caller: ");
next_m->print_short_name(tty);
tty->print(" ( interpreted " INTPTR_FORMAT ", size=%d ) ", p2i((address)next_m()), next_m->code_size());
tty->print(" ( interpreted " INTPTR_FORMAT ", size=%d ) ", p2i((address)next_m), next_m->code_size());
}
current = next;

View File

@ -848,6 +848,9 @@ public:
product(bool, UseCRC32Intrinsics, false, \
"use intrinsics for java.util.zip.CRC32") \
\
product(bool, UseCRC32CIntrinsics, false, \
"use intrinsics for java.util.zip.CRC32C") \
\
develop(bool, TraceCallFixup, false, \
"Trace all call fixups") \
\

View File

@ -52,12 +52,12 @@ InterpretedRFrame::InterpretedRFrame(frame fr, JavaThread* thread, RFrame*const
: RFrame(fr, thread, callee) {
RegisterMap map(thread, false);
_vf = javaVFrame::cast(vframe::new_vframe(&_fr, &map, thread));
_method = methodHandle(thread, _vf->method());
_method = _vf->method();
assert( _vf->is_interpreted_frame(), "must be interpreted");
init();
}
InterpretedRFrame::InterpretedRFrame(frame fr, JavaThread* thread, methodHandle m)
InterpretedRFrame::InterpretedRFrame(frame fr, JavaThread* thread, Method* m)
: RFrame(fr, thread, NULL) {
RegisterMap map(thread, false);
_vf = javaVFrame::cast(vframe::new_vframe(&_fr, &map, thread));
@ -140,8 +140,8 @@ void CompiledRFrame::init() {
_nm = compiledVFrame::cast(vf)->code();
vf = vf->top();
_vf = javaVFrame::cast(vf);
_method = methodHandle(thread(), CodeCache::find_nmethod(_fr.pc())->method());
assert(_method(), "should have found a method");
_method = CodeCache::find_nmethod(_fr.pc())->method();
assert(_method, "should have found a method");
#ifndef PRODUCT
_invocations = _method->compiled_invocation_count();
#endif

View File

@ -60,7 +60,7 @@ class RFrame : public ResourceObj {
frame fr() const { return _fr; }
JavaThread* thread() const { return _thread; }
virtual int cost() const = 0; // estimated inlining cost (size)
virtual methodHandle top_method() const = 0;
virtual Method* top_method() const = 0;
virtual javaVFrame* top_vframe() const = 0;
virtual nmethod* nm() const { ShouldNotCallThis(); return NULL; }
@ -79,7 +79,7 @@ class CompiledRFrame : public RFrame { // frame containing a compiled method
protected:
nmethod* _nm;
javaVFrame* _vf; // top vframe; may be NULL (for most recent frame)
methodHandle _method; // top method
Method* _method; // top method
CompiledRFrame(frame fr, JavaThread* thread, RFrame*const callee);
void init();
@ -88,7 +88,7 @@ class CompiledRFrame : public RFrame { // frame containing a compiled method
public:
CompiledRFrame(frame fr, JavaThread* thread); // for nmethod triggering its counter (callee == NULL)
bool is_compiled() const { return true; }
methodHandle top_method() const { return _method; }
Method* top_method() const { return _method; }
javaVFrame* top_vframe() const { return _vf; }
nmethod* nm() const { return _nm; }
int cost() const;
@ -98,16 +98,16 @@ class CompiledRFrame : public RFrame { // frame containing a compiled method
class InterpretedRFrame : public RFrame { // interpreter frame
protected:
javaVFrame* _vf; // may be NULL (for most recent frame)
methodHandle _method;
Method* _method;
InterpretedRFrame(frame fr, JavaThread* thread, RFrame*const callee);
void init();
friend class RFrame;
public:
InterpretedRFrame(frame fr, JavaThread* thread, methodHandle m); // constructor for method triggering its invocation counter
InterpretedRFrame(frame fr, JavaThread* thread, Method* m); // constructor for method triggering its invocation counter
bool is_interpreted() const { return true; }
methodHandle top_method() const { return _method; }
Method* top_method() const { return _method; }
javaVFrame* top_vframe() const { return _vf; }
int cost() const;
void print();

View File

@ -137,6 +137,8 @@ address StubRoutines::_sha512_implCompressMB = NULL;
address StubRoutines::_updateBytesCRC32 = NULL;
address StubRoutines::_crc_table_adr = NULL;
address StubRoutines::_updateBytesCRC32C = NULL;
address StubRoutines::_multiplyToLen = NULL;
address StubRoutines::_squareToLen = NULL;
address StubRoutines::_mulAdd = NULL;

View File

@ -197,6 +197,8 @@ class StubRoutines: AllStatic {
static address _updateBytesCRC32;
static address _crc_table_adr;
static address _updateBytesCRC32C;
static address _multiplyToLen;
static address _squareToLen;
static address _mulAdd;
@ -359,6 +361,8 @@ class StubRoutines: AllStatic {
static address updateBytesCRC32() { return _updateBytesCRC32; }
static address crc_table_addr() { return _crc_table_adr; }
static address updateBytesCRC32C() { return _updateBytesCRC32C; }
static address multiplyToLen() {return _multiplyToLen; }
static address squareToLen() {return _squareToLen; }
static address mulAdd() {return _mulAdd; }

View File

@ -830,6 +830,7 @@ typedef CompactHashtable<Symbol*, char> SymbolCompactHashTable;
static_field(StubRoutines, _ghash_processBlocks, address) \
static_field(StubRoutines, _updateBytesCRC32, address) \
static_field(StubRoutines, _crc_table_adr, address) \
static_field(StubRoutines, _updateBytesCRC32C, address) \
static_field(StubRoutines, _multiplyToLen, address) \
static_field(StubRoutines, _squareToLen, address) \
static_field(StubRoutines, _mulAdd, address) \

View File

@ -147,12 +147,16 @@ needs_compact3 = \
gc/survivorAlignment \
gc/TestGCLogRotationViaJcmd.java \
runtime/InternalApi/ThreadCpuTimesDeadlock.java \
runtime/NMT/JcmdSummaryDiff.java \
runtime/RedefineTests/RedefineAnnotations.java
serviceability/sa/jmap-hashcode/Test8028623.java \
serviceability/threads/TestFalseDeadLock.java \
compiler/codecache/jmx \
compiler/jsr292/RedefineMethodUsedByMultipleMethodHandles.java \
compiler/rangechecks/TestRangeCheckSmearing.java \
serviceability/dcmd
compiler/whitebox/DeoptimizeMultipleOSRTest.java \
serviceability/dcmd \
testlibrary_tests/whitebox/vm_flags
# Compact 2 adds full VM tests
compact2 = \

View File

@ -21,6 +21,7 @@
* questions.
*/
import jdk.test.lib.Asserts;
import jdk.test.lib.Utils;
import java.lang.management.MemoryPoolMXBean;
import javax.management.Notification;
@ -80,19 +81,42 @@ public final class CodeCacheUtils {
}
/**
* A "non-nmethods" code heap is used by interpreter during bytecode
* execution, thus, it can't be predicted if this code heap usage will be
* increased or not. Same goes for 'All'.
* Checks if the usage of the code heap corresponding to 'btype' can be
* predicted at runtime if we disable compilation. The usage of the
* 'NonNMethod' code heap can not be predicted because we generate adapters
* and buffers at runtime. The 'MethodNonProfiled' code heap is also not
* predictable because we may generate compiled versions of method handle
* intrinsics while resolving methods at runtime. Same applies to 'All'.
*
* @param btype BlobType to be checked
* @return boolean value, true if respective code heap is predictable
*/
public static boolean isCodeHeapPredictable(BlobType btype) {
return btype == BlobType.MethodNonProfiled
|| btype == BlobType.MethodProfiled;
return btype == BlobType.MethodProfiled;
}
public static void disableCollectionUsageThresholds(){
/**
* Verifies that 'newValue' is equal to 'oldValue' if usage of the
* corresponding code heap is predictable. Checks the weaker condition
* 'newValue >= oldValue' if usage is not predictable because intermediate
* allocations may happen.
*
* @param btype BlobType of the code heap to be checked
* @param newValue New value to be verified
* @param oldValue Old value to be verified
* @param msg Error message if verification fails
*/
public static void assertEQorGTE(BlobType btype, long newValue, long oldValue, String msg) {
if (CodeCacheUtils.isCodeHeapPredictable(btype)) {
// Usage is predictable, check strong == condition
Asserts.assertEQ(newValue, oldValue, msg);
} else {
// Usage is not predictable, check weaker >= condition
Asserts.assertGTE(newValue, oldValue, msg);
}
}
public static void disableCollectionUsageThresholds() {
BlobType.getAvailable().stream()
.map(BlobType::getMemoryPool)
.filter(MemoryPoolMXBean::isCollectionUsageThresholdSupported)

View File

@ -52,10 +52,8 @@ public class GetUsageTest {
public static void main(String[] args) throws Exception {
for (BlobType btype : BlobType.getAvailable()) {
if (CodeCacheUtils.isCodeHeapPredictable(btype)) {
for (int allocSize = 10; allocSize < 100000; allocSize *= 10) {
new GetUsageTest(btype, allocSize).runTest();
}
for (int allocSize = 10; allocSize < 100000; allocSize *= 10) {
new GetUsageTest(btype, allocSize).runTest();
}
}
}
@ -87,13 +85,15 @@ public class GetUsageTest {
for (MemoryPoolMXBean entry : predictableBeans) {
long diff = current.get(entry) - initial.get(entry);
if (entry.equals(btype.getMemoryPool())) {
Asserts.assertFalse(diff <= 0L || diff > usageUpperEstimate,
String.format("Pool %s usage increase was reported "
+ "unexpectedly as increased by %d using "
+ "allocation size %d", entry.getName(),
diff, allocateSize));
if (CodeCacheUtils.isCodeHeapPredictable(btype)) {
Asserts.assertFalse(diff <= 0L || diff > usageUpperEstimate,
String.format("Pool %s usage increase was reported "
+ "unexpectedly as increased by %d using "
+ "allocation size %d", entry.getName(),
diff, allocateSize));
}
} else {
Asserts.assertEQ(diff, 0L,
CodeCacheUtils.assertEQorGTE(btype, diff, 0L,
String.format("Pool %s usage changed unexpectedly while"
+ " trying to increase: %s using allocation "
+ "size %d", entry.getName(),

View File

@ -52,9 +52,7 @@ public class PeakUsageTest {
public static void main(String[] args) {
for (BlobType btype : BlobType.getAvailable()) {
if (CodeCacheUtils.isCodeHeapPredictable(btype)) {
new PeakUsageTest(btype).runTest();
}
new PeakUsageTest(btype).runTest();
}
}
@ -65,7 +63,7 @@ public class PeakUsageTest {
CodeCacheUtils.ALLOCATION_SIZE, btype.id);
long newPeakUsage = bean.getPeakUsage().getUsed();
try {
Asserts.assertEQ(newPeakUsage, bean.getUsage().getUsed(),
CodeCacheUtils.assertEQorGTE(btype, newPeakUsage, bean.getUsage().getUsed(),
"Peak usage does not match usage after allocation for "
+ bean.getName());
} finally {
@ -73,18 +71,18 @@ public class PeakUsageTest {
CodeCacheUtils.WB.freeCodeBlob(addr);
}
}
Asserts.assertEQ(newPeakUsage, bean.getPeakUsage().getUsed(),
CodeCacheUtils.assertEQorGTE(btype, newPeakUsage, bean.getPeakUsage().getUsed(),
"Code cache peak usage has changed after usage decreased for "
+ bean.getName());
bean.resetPeakUsage();
Asserts.assertEQ(bean.getPeakUsage().getUsed(),
CodeCacheUtils.assertEQorGTE(btype, bean.getPeakUsage().getUsed(),
bean.getUsage().getUsed(),
"Code cache peak usage is not equal to usage after reset for "
+ bean.getName());
long addr2 = CodeCacheUtils.WB.allocateCodeBlob(
CodeCacheUtils.ALLOCATION_SIZE, btype.id);
try {
Asserts.assertEQ(bean.getPeakUsage().getUsed(),
CodeCacheUtils.assertEQorGTE(btype, bean.getPeakUsage().getUsed(),
bean.getUsage().getUsed(),
"Code cache peak usage is not equal to usage after fresh "
+ "allocation for " + bean.getName());

View File

@ -97,13 +97,11 @@ public class PoolsIndependenceTest implements NotificationListener {
return false;
});
for (BlobType bt : BlobType.getAvailable()) {
if (CodeCacheUtils.isCodeHeapPredictable(bt)) {
int expectedNotificationsAmount = bt.equals(btype) ? 1 : 0;
Asserts.assertEQ(counters.get(bt.getMemoryPool().getName()).get(),
expectedNotificationsAmount, String.format("Unexpected "
+ "amount of notifications for pool: %s",
bt.getMemoryPool().getName()));
}
int expectedNotificationsAmount = bt.equals(btype) ? 1 : 0;
CodeCacheUtils.assertEQorGTE(btype, counters.get(bt.getMemoryPool().getName()).get(),
expectedNotificationsAmount, String.format("Unexpected "
+ "amount of notifications for pool: %s",
bt.getMemoryPool().getName()));
}
try {
((NotificationEmitter) ManagementFactory.getMemoryMXBean()).

View File

@ -54,9 +54,7 @@ public class ThresholdNotificationsTest implements NotificationListener {
public static void main(String[] args) {
for (BlobType bt : BlobType.getAvailable()) {
if (CodeCacheUtils.isCodeHeapPredictable(bt)) {
new ThresholdNotificationsTest(bt).runTest();
}
new ThresholdNotificationsTest(bt).runTest();
}
}
@ -92,7 +90,9 @@ public class ThresholdNotificationsTest implements NotificationListener {
}
Asserts.assertTrue(
Utils.waitForCondition(
() -> counter == iterationsCount, WAIT_TIME),
() -> (CodeCacheUtils.isCodeHeapPredictable(btype) ?
(counter == iterationsCount) : (counter >= iterationsCount)),
WAIT_TIME),
"Couldn't receive expected notifications count");
try {
((NotificationEmitter) ManagementFactory.getMemoryMXBean()).

View File

@ -51,13 +51,9 @@ public class UsageThresholdExceededTest {
}
public static void main(String[] args) {
int iterationsCount =
Integer.getInteger("jdk.test.lib.iterations", 1);
int iterationsCount = Integer.getInteger("jdk.test.lib.iterations", 1);
for (BlobType btype : BlobType.getAvailable()) {
if (CodeCacheUtils.isCodeHeapPredictable(btype)) {
new UsageThresholdExceededTest(btype, iterationsCount)
.runTest();
}
new UsageThresholdExceededTest(btype, iterationsCount).runTest();
}
}
@ -67,9 +63,8 @@ public class UsageThresholdExceededTest {
for (int i = 0; i < iterations; i++) {
CodeCacheUtils.hitUsageThreshold(bean, btype);
}
Asserts.assertEQ(bean.getUsageThresholdCount(), oldValue + iterations,
CodeCacheUtils.assertEQorGTE(btype, bean.getUsageThresholdCount(), oldValue + iterations,
"Unexpected threshold usage count");
System.out.printf("INFO: Scenario finished successfully for %s%n",
bean.getName());
System.out.printf("INFO: Scenario finished successfully for %s%n", bean.getName());
}
}

View File

@ -27,7 +27,6 @@ import sun.hotspot.code.BlobType;
/*
* @test UsageThresholdIncreasedTest
* @ignore 8129937
* @library /testlibrary /../../test/lib
* @modules java.base/sun.misc
* java.management
@ -54,14 +53,12 @@ public class UsageThresholdIncreasedTest {
public static void main(String[] args) {
for (BlobType btype : BlobType.getAvailable()) {
if (CodeCacheUtils.isCodeHeapPredictable(btype)) {
new UsageThresholdIncreasedTest(btype).runTest();
}
new UsageThresholdIncreasedTest(btype).runTest();
}
}
private void checkUsageThresholdCount(MemoryPoolMXBean bean, long count){
Asserts.assertEQ(bean.getUsageThresholdCount(), count,
CodeCacheUtils.assertEQorGTE(btype, bean.getUsageThresholdCount(), count,
String.format("Usage threshold was hit: %d times for %s "
+ "Threshold value: %d with current usage: %d",
bean.getUsageThresholdCount(), bean.getName(),

View File

@ -50,9 +50,7 @@ public class UsageThresholdNotExceededTest {
public static void main(String[] args) {
for (BlobType btype : BlobType.getAvailable()) {
if (CodeCacheUtils.isCodeHeapPredictable(btype)) {
new UsageThresholdNotExceededTest(btype).runTest();
}
new UsageThresholdNotExceededTest(btype).runTest();
}
}
@ -65,13 +63,11 @@ public class UsageThresholdNotExceededTest {
- CodeCacheUtils.getHeaderSize(btype), btype.id);
// a gc cycle triggers usage threshold recalculation
CodeCacheUtils.WB.fullGC();
Asserts.assertEQ(bean.getUsageThresholdCount(), initialThresholdCount,
String.format("Usage threshold was hit: %d times for %s. "
CodeCacheUtils.assertEQorGTE(btype, bean.getUsageThresholdCount(), initialThresholdCount,
String.format("Usage threshold was hit: %d times for %s. "
+ "Threshold value: %d with current usage: %d",
bean.getUsageThresholdCount(), bean.getName(),
bean.getUsageThreshold(), bean.getUsage().getUsed()));
System.out.println("INFO: Case finished successfully for "
+ bean.getName());
System.out.println("INFO: Case finished successfully for " + bean.getName());
}
}

View File

@ -0,0 +1,221 @@
/*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @bug 8073583
* @summary C2 support for CRC32C on SPARC
*
* @run main/othervm/timeout=600 -Xbatch TestCRC32C -m
*/
import java.nio.ByteBuffer;
import java.util.zip.Checksum;
import java.util.zip.CRC32C;
public class TestCRC32C {
public static void main(String[] args) {
int offset = Integer.getInteger("offset", 0);
int msgSize = Integer.getInteger("msgSize", 512);
boolean multi = false;
int iters = 20000;
int warmupIters = 20000;
if (args.length > 0) {
if (args[0].equals("-m")) {
multi = true;
} else {
iters = Integer.valueOf(args[0]);
}
if (args.length > 1) {
warmupIters = Integer.valueOf(args[1]);
}
}
if (multi) {
test_multi(warmupIters);
return;
}
System.out.println(" offset = " + offset);
System.out.println("msgSize = " + msgSize + " bytes");
System.out.println(" iters = " + iters);
byte[] b = initializedBytes(msgSize, offset);
CRC32C crc0 = new CRC32C();
CRC32C crc1 = new CRC32C();
CRC32C crc2 = new CRC32C();
crc0.update(b, offset, msgSize);
System.out.println("-------------------------------------------------------");
/* warm up */
for (int i = 0; i < warmupIters; i++) {
crc1.reset();
crc1.update(b, offset, msgSize);
}
/* measure performance */
long start = System.nanoTime();
for (int i = 0; i < iters; i++) {
crc1.reset();
crc1.update(b, offset, msgSize);
}
long end = System.nanoTime();
double total = (double)(end - start)/1e9; // in seconds
double thruput = (double)msgSize*iters/1e6/total; // in MB/s
System.out.println("CRC32C.update(byte[]) runtime = " + total + " seconds");
System.out.println("CRC32C.update(byte[]) throughput = " + thruput + " MB/s");
/* check correctness */
for (int i = 0; i < iters; i++) {
crc1.reset();
crc1.update(b, offset, msgSize);
if (!check(crc0, crc1)) break;
}
report("CRCs", crc0, crc1);
System.out.println("-------------------------------------------------------");
ByteBuffer buf = ByteBuffer.allocateDirect(msgSize);
buf.put(b, offset, msgSize);
buf.flip();
/* warm up */
for (int i = 0; i < warmupIters; i++) {
crc2.reset();
crc2.update(buf);
buf.rewind();
}
/* measure performance */
start = System.nanoTime();
for (int i = 0; i < iters; i++) {
crc2.reset();
crc2.update(buf);
buf.rewind();
}
end = System.nanoTime();
total = (double)(end - start)/1e9; // in seconds
thruput = (double)msgSize*iters/1e6/total; // in MB/s
System.out.println("CRC32C.update(ByteBuffer) runtime = " + total + " seconds");
System.out.println("CRC32C.update(ByteBuffer) throughput = " + thruput + " MB/s");
/* check correctness */
for (int i = 0; i < iters; i++) {
crc2.reset();
crc2.update(buf);
buf.rewind();
if (!check(crc0, crc2)) break;
}
report("CRCs", crc0, crc2);
System.out.println("-------------------------------------------------------");
}
private static void report(String s, Checksum crc0, Checksum crc1) {
System.out.printf("%s: crc0 = %08x, crc1 = %08x\n",
s, crc0.getValue(), crc1.getValue());
}
private static boolean check(Checksum crc0, Checksum crc1) {
if (crc0.getValue() != crc1.getValue()) {
System.err.printf("ERROR: crc0 = %08x, crc1 = %08x\n",
crc0.getValue(), crc1.getValue());
return false;
}
return true;
}
private static byte[] initializedBytes(int M, int offset) {
byte[] bytes = new byte[M + offset];
for (int i = 0; i < offset; i++) {
bytes[i] = (byte) i;
}
for (int i = offset; i < bytes.length; i++) {
bytes[i] = (byte) (i - offset);
}
return bytes;
}
private static void test_multi(int iters) {
int len1 = 8; // the 8B/iteration loop
int len2 = 32; // the 32B/iteration loop
int len3 = 4096; // the 4KB/iteration loop
byte[] b = initializedBytes(len3*16, 0);
int[] offsets = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 16, 32, 64, 128, 256, 512 };
int[] sizes = { 0, 1, 2, 3, 4, 5, 6, 7,
len1, len1+1, len1+2, len1+3, len1+4, len1+5, len1+6, len1+7,
len1*2, len1*2+1, len1*2+3, len1*2+5, len1*2+7,
len2, len2+1, len2+3, len2+5, len2+7,
len2*2, len2*4, len2*8, len2*16, len2*32, len2*64,
len3, len3+1, len3+3, len3+5, len3+7,
len3*2, len3*4, len3*8,
len1+len2, len1+len2+1, len1+len2+3, len1+len2+5, len1+len2+7,
len1+len3, len1+len3+1, len1+len3+3, len1+len3+5, len1+len3+7,
len2+len3, len2+len3+1, len2+len3+3, len2+len3+5, len2+len3+7,
len1+len2+len3, len1+len2+len3+1, len1+len2+len3+3,
len1+len2+len3+5, len1+len2+len3+7,
(len1+len2+len3)*2, (len1+len2+len3)*2+1, (len1+len2+len3)*2+3,
(len1+len2+len3)*2+5, (len1+len2+len3)*2+7,
(len1+len2+len3)*3, (len1+len2+len3)*3-1, (len1+len2+len3)*3-3,
(len1+len2+len3)*3-5, (len1+len2+len3)*3-7 };
CRC32C[] crc0 = new CRC32C[offsets.length*sizes.length];
CRC32C[] crc1 = new CRC32C[offsets.length*sizes.length];
int i, j, k;
System.out.printf("testing %d cases ...\n", offsets.length*sizes.length);
/* set the result from interpreter as reference */
for (i = 0; i < offsets.length; i++) {
for (j = 0; j < sizes.length; j++) {
crc0[i*sizes.length + j] = new CRC32C();
crc1[i*sizes.length + j] = new CRC32C();
crc0[i*sizes.length + j].update(b, offsets[i], sizes[j]);
}
}
/* warm up the JIT compiler and get result */
for (k = 0; k < iters; k++) {
for (i = 0; i < offsets.length; i++) {
for (j = 0; j < sizes.length; j++) {
crc1[i*sizes.length + j].reset();
crc1[i*sizes.length + j].update(b, offsets[i], sizes[j]);
}
}
}
/* check correctness */
for (i = 0; i < offsets.length; i++) {
for (j = 0; j < sizes.length; j++) {
if (!check(crc0[i*sizes.length + j], crc1[i*sizes.length + j])) {
System.out.printf("offsets[%d] = %d", i, offsets[i]);
System.out.printf("\tsizes[%d] = %d\n", j, sizes[j]);
}
}
}
}
}

View File

@ -314,3 +314,4 @@ d5963ccce28d7a3e96ee3e2dc8a8676e61699b70 jdk9-b66
f844a908d3308f47d73cf64e87c98d37d5d76ce8 jdk9-b69
42180703e0a362c1de7cdbf61d2cbc6609e678c4 jdk9-b70
a3200b88f259f904876b9ab13fd4c4ec2726f8ba jdk9-b71
81e85f3b6174314155991048767452a9931e12e2 jdk9-b72

View File

@ -1,171 +0,0 @@
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: Compile.java,v 1.2.4.1 2005/08/31 11:24:13 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.cmdline;
import com.sun.org.apache.xalan.internal.utils.FeatureManager;
import java.io.File;
import java.net.URL;
import java.util.Vector;
import com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt.GetOpt;
import com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt.GetOptsException;
import com.sun.org.apache.xalan.internal.xsltc.compiler.XSLTC;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ErrorMsg;
/**
* @author Jacek Ambroziak
* @author Santiago Pericas-Geertsen
* @author G. Todd Miller
* @author Morten Jorgensen
*/
public final class Compile {
// Versioning numbers for the compiler -v option output
private static int VERSION_MAJOR = 1;
private static int VERSION_MINOR = 4;
private static int VERSION_DELTA = 0;
// This variable should be set to false to prevent any methods in this
// class from calling System.exit(). As this is a command-line tool,
// calling System.exit() is normally OK, but we also want to allow for
// this class being used in other ways as well.
private static boolean _allowExit = true;
public static void printUsage() {
System.err.println("XSLTC version " +
VERSION_MAJOR + "." + VERSION_MINOR +
((VERSION_DELTA > 0) ? ("." + VERSION_DELTA) : ("")) + "\n" +
new ErrorMsg(ErrorMsg.COMPILE_USAGE_STR));
if (_allowExit) System.exit(-1);
}
/**
* This method implements the command line compiler. See the USAGE_STRING
* constant for a description. It may make sense to move the command-line
* handling to a separate package (ie. make one xsltc.cmdline.Compiler
* class that contains this main() method and one xsltc.cmdline.Transform
* class that contains the DefaultRun stuff).
*/
public static void main(String[] args) {
try {
boolean inputIsURL = false;
boolean useStdIn = false;
boolean classNameSet = false;
final GetOpt getopt = new GetOpt(args, "o:d:j:p:uxhsinv");
if (args.length < 1) printUsage();
final XSLTC xsltc = new XSLTC(true, new FeatureManager());
xsltc.init();
int c;
while ((c = getopt.getNextOption()) != -1) {
switch(c) {
case 'i':
useStdIn = true;
break;
case 'o':
xsltc.setClassName(getopt.getOptionArg());
classNameSet = true;
break;
case 'd':
xsltc.setDestDirectory(getopt.getOptionArg());
break;
case 'p':
xsltc.setPackageName(getopt.getOptionArg());
break;
case 'j':
xsltc.setJarFileName(getopt.getOptionArg());
break;
case 'x':
xsltc.setDebug(true);
break;
case 'u':
inputIsURL = true;
break;
case 's':
_allowExit = false;
break;
case 'n':
xsltc.setTemplateInlining(true); // used to be 'false'
break;
case 'v':
// fall through to case h
case 'h':
default:
printUsage();
break;
}
}
boolean compileOK;
if (useStdIn) {
if (!classNameSet) {
System.err.println(new ErrorMsg(ErrorMsg.COMPILE_STDIN_ERR));
if (_allowExit) System.exit(-1);
}
compileOK = xsltc.compile(System.in, xsltc.getClassName());
}
else {
// Generate a vector containing URLs for all stylesheets specified
final String[] stylesheetNames = getopt.getCmdArgs();
final Vector stylesheetVector = new Vector();
for (int i = 0; i < stylesheetNames.length; i++) {
final String name = stylesheetNames[i];
URL url;
if (inputIsURL)
url = new URL(name);
else
url = (new File(name)).toURI().toURL();
stylesheetVector.addElement(url);
}
compileOK = xsltc.compile(stylesheetVector);
}
// Compile the stylesheet and output class/jar file(s)
if (compileOK) {
xsltc.printWarnings();
if (xsltc.getJarFileName() != null) xsltc.outputToJar();
if (_allowExit) System.exit(0);
}
else {
xsltc.printWarnings();
xsltc.printErrors();
if (_allowExit) System.exit(-1);
}
}
catch (GetOptsException ex) {
System.err.println(ex);
printUsage(); // exits with code '-1'
}
catch (Exception e) {
e.printStackTrace();
if (_allowExit) System.exit(-1);
}
}
}

View File

@ -1,292 +0,0 @@
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: Transform.java,v 1.2.4.1 2005/09/12 09:07:33 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.cmdline;
import com.sun.org.apache.xalan.internal.utils.ObjectFactory;
import com.sun.org.apache.xalan.internal.xsltc.DOMEnhancedForDTM;
import com.sun.org.apache.xalan.internal.xsltc.StripFilter;
import com.sun.org.apache.xalan.internal.xsltc.TransletException;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ErrorMsg;
import com.sun.org.apache.xalan.internal.xsltc.dom.DOMWSFilter;
import com.sun.org.apache.xalan.internal.xsltc.dom.XSLTCDTMManager;
import com.sun.org.apache.xalan.internal.xsltc.runtime.AbstractTranslet;
import com.sun.org.apache.xalan.internal.xsltc.runtime.Constants;
import com.sun.org.apache.xalan.internal.xsltc.runtime.Parameter;
import com.sun.org.apache.xalan.internal.xsltc.runtime.output.TransletOutputHandlerFactory;
import com.sun.org.apache.xml.internal.dtm.DTMWSFilter;
import com.sun.org.apache.xml.internal.serializer.SerializationHandler;
import java.io.FileNotFoundException;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import java.util.Vector;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.sax.SAXSource;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
/**
* @author Jacek Ambroziak
* @author Santiago Pericas-Geertsen
* @author G. Todd Miller
* @author Morten Jorgensen
*/
final public class Transform {
private SerializationHandler _handler;
private String _fileName;
private String _className;
private String _jarFileSrc;
private boolean _isJarFileSpecified = false;
private Vector _params = null;
private boolean _uri, _debug;
private int _iterations;
public Transform(String className, String fileName,
boolean uri, boolean debug, int iterations) {
_fileName = fileName;
_className = className;
_uri = uri;
_debug = debug;
_iterations = iterations;
}
public String getFileName(){return _fileName;}
public String getClassName(){return _className;}
public void setParameters(Vector params) {
_params = params;
}
private void setJarFileInputSrc(boolean flag, String jarFile) {
// TODO: at this time we do not do anything with this
// information, attempts to add the jarfile to the CLASSPATH
// were successful via System.setProperty, but the effects
// were not visible to the running JVM. For now we add jarfile
// to CLASSPATH in the wrapper script that calls this program.
_isJarFileSpecified = flag;
// TODO verify jarFile exists...
_jarFileSrc = jarFile;
}
private void doTransform() {
try {
final Class clazz = ObjectFactory.findProviderClass(_className, true);
final AbstractTranslet translet = (AbstractTranslet)clazz.newInstance();
translet.postInitialization();
// Create a SAX parser and get the XMLReader object it uses
final SAXParserFactory factory = SAXParserFactory.newInstance();
try {
factory.setFeature(Constants.NAMESPACE_FEATURE,true);
}
catch (Exception e) {
factory.setNamespaceAware(true);
}
final SAXParser parser = factory.newSAXParser();
final XMLReader reader = parser.getXMLReader();
// Set the DOM's DOM builder as the XMLReader's SAX2 content handler
XSLTCDTMManager dtmManager =
XSLTCDTMManager.createNewDTMManagerInstance();
DTMWSFilter wsfilter;
if (translet != null && translet instanceof StripFilter) {
wsfilter = new DOMWSFilter(translet);
} else {
wsfilter = null;
}
final DOMEnhancedForDTM dom =
(DOMEnhancedForDTM)dtmManager.getDTM(
new SAXSource(reader, new InputSource(_fileName)),
false, wsfilter, true, false, translet.hasIdCall());
dom.setDocumentURI(_fileName);
translet.prepassDocument(dom);
// Pass global parameters
int n = _params.size();
for (int i = 0; i < n; i++) {
Parameter param = (Parameter) _params.elementAt(i);
translet.addParameter(param._name, param._value);
}
// Transform the document
TransletOutputHandlerFactory tohFactory =
TransletOutputHandlerFactory.newInstance();
tohFactory.setOutputType(TransletOutputHandlerFactory.STREAM);
tohFactory.setEncoding(translet._encoding);
tohFactory.setOutputMethod(translet._method);
if (_iterations == -1) {
translet.transform(dom, tohFactory.getSerializationHandler());
}
else if (_iterations > 0) {
long mm = System.currentTimeMillis();
for (int i = 0; i < _iterations; i++) {
translet.transform(dom,
tohFactory.getSerializationHandler());
}
mm = System.currentTimeMillis() - mm;
System.err.println("\n<!--");
System.err.println(" transform = "
+ (((double) mm) / ((double) _iterations))
+ " ms");
System.err.println(" throughput = "
+ (1000.0 / (((double) mm)
/ ((double) _iterations)))
+ " tps");
System.err.println("-->");
}
}
catch (TransletException e) {
if (_debug) e.printStackTrace();
System.err.println(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY)+
e.getMessage());
}
catch (RuntimeException e) {
if (_debug) e.printStackTrace();
System.err.println(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY)+
e.getMessage());
}
catch (FileNotFoundException e) {
if (_debug) e.printStackTrace();
ErrorMsg err = new ErrorMsg(ErrorMsg.FILE_NOT_FOUND_ERR, _fileName);
System.err.println(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY)+
err.toString());
}
catch (MalformedURLException e) {
if (_debug) e.printStackTrace();
ErrorMsg err = new ErrorMsg(ErrorMsg.INVALID_URI_ERR, _fileName);
System.err.println(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY)+
err.toString());
}
catch (ClassNotFoundException e) {
if (_debug) e.printStackTrace();
ErrorMsg err= new ErrorMsg(ErrorMsg.CLASS_NOT_FOUND_ERR,_className);
System.err.println(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY)+
err.toString());
}
catch (UnknownHostException e) {
if (_debug) e.printStackTrace();
ErrorMsg err = new ErrorMsg(ErrorMsg.INVALID_URI_ERR, _fileName);
System.err.println(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY)+
err.toString());
}
catch (SAXException e) {
Exception ex = e.getException();
if (_debug) {
if (ex != null) ex.printStackTrace();
e.printStackTrace();
}
System.err.print(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY));
if (ex != null)
System.err.println(ex.getMessage());
else
System.err.println(e.getMessage());
}
catch (Exception e) {
if (_debug) e.printStackTrace();
System.err.println(new ErrorMsg(ErrorMsg.RUNTIME_ERROR_KEY)+
e.getMessage());
}
}
public static void printUsage() {
System.err.println(new ErrorMsg(ErrorMsg.TRANSFORM_USAGE_STR));
}
public static void main(String[] args) {
try {
if (args.length > 0) {
int i;
int iterations = -1;
boolean uri = false, debug = false;
boolean isJarFileSpecified = false;
String jarFile = null;
// Parse options starting with '-'
for (i = 0; i < args.length && args[i].charAt(0) == '-'; i++) {
if (args[i].equals("-u")) {
uri = true;
}
else if (args[i].equals("-x")) {
debug = true;
}
else if (args[i].equals("-j")) {
isJarFileSpecified = true;
jarFile = args[++i];
}
else if (args[i].equals("-n")) {
try {
iterations = Integer.parseInt(args[++i]);
}
catch (NumberFormatException e) {
// ignore
}
}
else {
printUsage();
}
}
// Enough arguments left ?
if (args.length - i < 2) printUsage();
// Get document file and class name
Transform handler = new Transform(args[i+1], args[i], uri,
debug, iterations);
handler.setJarFileInputSrc(isJarFileSpecified, jarFile);
// Parse stylesheet parameters
Vector params = new Vector();
for (i += 2; i < args.length; i++) {
final int equal = args[i].indexOf('=');
if (equal > 0) {
final String name = args[i].substring(0, equal);
final String value = args[i].substring(equal+1);
params.addElement(new Parameter(name, value));
}
else {
printUsage();
}
}
if (i == args.length) {
handler.setParameters(params);
handler.doTransform();
}
} else {
printUsage();
}
}
catch (Exception e) {
e.printStackTrace();
}
}
}

View File

@ -1,258 +0,0 @@
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: GetOpt.java,v 1.2.4.1 2005/08/31 11:46:04 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ErrorMsg;
/**
* GetOpt is a Java equivalent to the C getopt() library function
* discussed in man page getopt(3C). It provides command line
* parsing for Java applications. It supports the most rules of the
* command line standard (see man page intro(1)) including stacked
* options such as '-sxm' (which is equivalent to -s -x -m); it
* handles special '--' option that signifies the end of options.
* Additionally this implementation of getopt will check for
* mandatory arguments to options such as in the case of
* '-d <file>' it will throw a MissingOptArgException if the
* option argument '<file>' is not included on the commandline.
* getopt(3C) does not check for this.
* @author G Todd Miller
*/
public class GetOpt{
public GetOpt(String[] args, String optString){
theOptions = new ArrayList();
int currOptIndex = 0;
theCmdArgs = new ArrayList();
theOptionMatcher = new OptionMatcher(optString);
// fill in the options list
for(int i=0; i<args.length; i++){
String token = args[i];
int tokenLength = token.length();
if(token.equals("--")){ // end of opts
currOptIndex = i+1; // set index of first operand
break; // end of options
}
else if(token.startsWith("-") && tokenLength == 2){
// simple option token such as '-s' found
theOptions.add(new Option(token.charAt(1)));
}
else if(token.startsWith("-") && tokenLength > 2){
// stacked options found, such as '-shm'
// iterate thru the tokens after the dash and
// add them to theOptions list
for(int j=1; j<tokenLength; j++){
theOptions.add(new Option(token.charAt(j)));
}
}
else if(!token.startsWith("-")){
// case 1- there are not options stored yet therefore
// this must be an command argument, not an option argument
if(theOptions.size() == 0){
currOptIndex = i;
break; // stop processing options
}
else {
// case 2-
// there are options stored, check to see if
// this arg belong to the last arg stored
int indexoflast=0;
indexoflast = theOptions.size()-1;
Option op = (Option)theOptions.get(indexoflast);
char opLetter = op.getArgLetter();
if(!op.hasArg() && theOptionMatcher.hasArg(opLetter)){
op.setArg(token);
}
else{
// case 3 -
// the last option stored does not take
// an argument, so again, this argument
// must be a command argument, not
// an option argument
currOptIndex = i;
break; // end of options
}
}
}// end option does not start with "-"
} // end for args loop
// attach an iterator to list of options
theOptionsIterator = theOptions.listIterator();
// options are done, now fill out cmd arg list with remaining args
for(int i=currOptIndex; i<args.length; i++){
String token = args[i];
theCmdArgs.add(token);
}
}
/**
* debugging routine to print out all options collected
*/
public void printOptions(){
for(ListIterator it=theOptions.listIterator(); it.hasNext();){
Option opt = (Option)it.next();
System.out.print("OPT =" + opt.getArgLetter());
String arg = opt.getArgument();
if(arg != null){
System.out.print(" " + arg);
}
System.out.println();
}
}
/**
* gets the next option found in the commandline. Distinguishes
* between two bad cases, one case is when an illegal option
* is found, and then other case is when an option takes an
* argument but no argument was found for that option.
* If the option found was not declared in the optString, then
* an IllegalArgumentException will be thrown (case 1).
* If the next option found has been declared to take an argument,
* and no such argument exists, then a MissingOptArgException
* is thrown (case 2).
* @param none
* @return int - the next option found.
* @throws IllegalArgumentException, MissingOptArgException.
*/
public int getNextOption() throws IllegalArgumentException,
MissingOptArgException
{
int retval = -1;
if(theOptionsIterator.hasNext()){
theCurrentOption = (Option)theOptionsIterator.next();
char c = theCurrentOption.getArgLetter();
boolean shouldHaveArg = theOptionMatcher.hasArg(c);
String arg = theCurrentOption.getArgument();
if(!theOptionMatcher.match(c)) {
ErrorMsg msg = new ErrorMsg(ErrorMsg.ILLEGAL_CMDLINE_OPTION_ERR,
new Character(c));
throw (new IllegalArgumentException(msg.toString()));
}
else if(shouldHaveArg && (arg == null)) {
ErrorMsg msg = new ErrorMsg(ErrorMsg.CMDLINE_OPT_MISSING_ARG_ERR,
new Character(c));
throw (new MissingOptArgException(msg.toString()));
}
retval = c;
}
return retval;
}
/**
* gets the argument for the current parsed option. For example,
* in case of '-d <file>', if current option parsed is 'd' then
* getOptionArg() would return '<file>'.
* @return String - argument for current parsed option.
* @param none
*/
public String getOptionArg(){
String retval = null;
String tmp = theCurrentOption.getArgument();
char c = theCurrentOption.getArgLetter();
if(theOptionMatcher.hasArg(c)){
retval = tmp;
}
return retval;
}
/**
* gets list of the commandline arguments. For example, in command
* such as 'cmd -s -d file file2 file3 file4' with the usage
* 'cmd [-s] [-d <file>] <file>...', getCmdArgs() would return
* the list {file2, file3, file4}.
* @return String[] - list of command arguments that may appear
* after options and option arguments.
* @params none
*/
public String[] getCmdArgs(){
String[] retval = new String[theCmdArgs.size()];
int i=0;
for(ListIterator it=theCmdArgs.listIterator(); it.hasNext();){
retval[i++] = (String)it.next();
}
return retval;
}
private Option theCurrentOption = null;
private ListIterator theOptionsIterator;
private List theOptions = null;
private List theCmdArgs = null;
private OptionMatcher theOptionMatcher = null;
///////////////////////////////////////////////////////////
//
// Inner Classes
//
///////////////////////////////////////////////////////////
// inner class to model an option
class Option{
private char theArgLetter;
private String theArgument = null;
public Option(char argLetter) { theArgLetter = argLetter; }
public void setArg(String arg) {
theArgument = arg;
}
public boolean hasArg() { return (theArgument != null); }
public char getArgLetter() { return theArgLetter; }
public String getArgument() { return theArgument; }
} // end class Option
// inner class to query optString for a possible option match,
// and whether or not a given legal option takes an argument.
//
class OptionMatcher{
public OptionMatcher(String optString){
theOptString = optString;
}
public boolean match(char c){
boolean retval = false;
if(theOptString.indexOf(c) != -1){
retval = true;
}
return retval;
}
public boolean hasArg(char c){
boolean retval = false;
int index = theOptString.indexOf(c)+1;
if (index == theOptString.length()){
// reached end of theOptString
retval = false;
}
else if(theOptString.charAt(index) == ':'){
retval = true;
}
return retval;
}
private String theOptString = null;
} // end class OptionMatcher
}// end class GetOpt
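A minimal usage sketch of the GetOpt class above; the option string and argument values here are illustrative and not taken from the XSLTC sources (Compile.java earlier in this changeset shows the real call site):
import com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt.GetOpt;
import com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt.GetOptsException;
public class GetOptExample {
    public static void main(String[] ignored) {
        try {
            // "o:d:x" declares -o and -d as options taking an argument, -x as a plain flag
            GetOpt opts = new GetOpt(new String[] { "-d", "out", "-x", "style.xsl" }, "o:d:x");
            int c;
            while ((c = opts.getNextOption()) != -1) {
                if (c == 'd') System.out.println("dest dir = " + opts.getOptionArg());
                else if (c == 'x') System.out.println("debug enabled");
            }
            for (String arg : opts.getCmdArgs()) {
                System.out.println("stylesheet: " + arg);   // prints "stylesheet: style.xsl"
            }
        } catch (GetOptsException e) {   // covers illegal options and missing option arguments
            e.printStackTrace();
        }
    }
}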

View File

@ -1,34 +0,0 @@
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: GetOptsException.java,v 1.2.4.1 2005/08/31 11:47:06 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt;
/**
* @author G Todd Miller
*/
public class GetOptsException extends Exception{
static final long serialVersionUID = 8736874967183039804L;
public GetOptsException(String msg){
super(msg);
}
}

View File

@ -1,32 +0,0 @@
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: IllegalArgumentException.java,v 1.2.4.1 2005/08/31 11:47:56 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt;
class IllegalArgumentException extends GetOptsException{
static final long serialVersionUID = 8642122427294793651L;
public IllegalArgumentException(String msg){
super(msg);
}
}

View File

@ -1,35 +0,0 @@
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: MissingOptArgException.java,v 1.2.4.1 2005/08/31 11:49:21 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt;
/**
* @author G Todd Miller
*/
class MissingOptArgException extends GetOptsException{
static final long serialVersionUID = -1972471465394544822L;
public MissingOptArgException(String msg){
super(msg);
}
}

View File

@ -49,7 +49,7 @@ public interface XMLEventReader extends Iterator {
* Get the next XMLEvent
* @see XMLEvent
* @throws XMLStreamException if there is an error with the underlying XML.
* @throws NoSuchElementException iteration has no more elements.
* @throws java.util.NoSuchElementException iteration has no more elements.
*/
public XMLEvent nextEvent() throws XMLStreamException;

View File

@ -170,7 +170,7 @@ public interface XMLEvent extends javax.xml.stream.XMLStreamConstants {
* infoset expressed.
*
* @param writer The writer that will output the data
* @throws XMLStreamException if there is a fatal error writing the event
* @throws javax.xml.stream.XMLStreamException if there is a fatal error writing the event
*/
public void writeAsEncodedUnicode(Writer writer)
throws javax.xml.stream.XMLStreamException;

View File

@ -317,3 +317,4 @@ b5878b03d1b2e105917d959fbfa3c57c22495803 jdk9-b68
f5911c6155c29ac24b6f9068273207e5ebd3a3df jdk9-b69
94084caa27a3c8a09a7510aef596ebd64e97c569 jdk9-b70
61caeb7061bbf8cc74a767997e5d17cc00712629 jdk9-b71
1d87054e2d2f405c114f0061b97cbf8214bddf0a jdk9-b72

View File

@ -314,3 +314,4 @@ ed94f3e7ba6bbfec0772de6d24e39543e13f6d88 jdk9-b65
551323004d0ce2f1d4b0e99552f7e0cdcebc6fca jdk9-b69
a7f731125b7fb0e4b0186172f85a21e2d5139f7e jdk9-b70
e47d3bfbc61accc3fbd372a674fdce2933b54f31 jdk9-b71
f376824d4940f45719d91838f3f6249f873440db jdk9-b72

View File

@ -1,34 +0,0 @@
This directory contains tools and tests associated with creating the
class list for class data sharing.
The class list is produced by running the refWorkload startup3 benchmark with
the -XX:+TraceClassLoadingPreorder option. The -Xshare:off option must also be
used so that bootclasspath classes are loaded from rt.jar. The MakeClasslist
program should be built into the jar file makeclasslist.jar and is run
on one of the logs from each of the benchmarks in the following fashion:
cd .../<resultsdir>/results.startup3
$JAVA_HOME/bin/java -jar makeclasslist.jar results.Noop/results_1/log results.Framer/results_1/log results.XFramer/results_1/log results.JEdit/results_1/log results.LimeWire/results_1/log results.NetBeans50/results_1/log
Presently, $JAVA_HOME must be the same path used to run the startup3 benchmark.
The logs are deliberately concatenated in roughly smallest to largest order
based on application size. The resulting output is redirected into a file
and results in one of classlist.solaris, classlist.linux, classlist.macosx,
or classlist.windows. These files are checked in to the workspace. A
necessary checksum (AddJsum.java) is added to the final classlist
(installed in lib/ or jre/lib/) during the build process by the
makefiles in make/java/redist.
In a forthcoming JDK build we plan to manually add the dependent
classes for the calendar manager Glow, which pulls in the Preferences
classes and, on Unix platforms, the XML parsing classes.
The properties file supplied to the refworkload is approximately the
following:
javahome=/usr/java/j2sdk1.8.0
resultsdir=classlist-run
iterations=1
benchmarks=startup3
globalvmoptions=-client -Xshare:off -XX:+TraceClassLoadingPreorder

View File

@ -1,130 +0,0 @@
/*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package build.tools.makeclasslist;
import java.io.*;
import java.util.*;
import java.util.jar.*;
/** Reads a set of files containing the output of java
-XX:+TraceClassLoadingPreorder runs. Finds all classes that were
loaded from the bootstrap class path by comparing the prefix of
the load path to the current JRE's java.home system property.
Prints the names of these classes to stdout.
*/
public class MakeClasslist {
public static void main(String[] args) throws IOException {
List<String> classes = new ArrayList<>();
String origJavaHome = System.getProperty("java.home");
String javaHome = origJavaHome.toLowerCase();
if (javaHome.endsWith("jre")) {
origJavaHome = origJavaHome.substring(0, origJavaHome.length() - 4);
javaHome = javaHome.substring(0, javaHome.length() - 4);
}
for (int i = 0; i < args.length; i++) {
try {
File file = new File(args[i]);
BufferedReader reader = new BufferedReader(new FileReader(file));
String line = null;
while ((line = reader.readLine()) != null) {
StringTokenizer tok = new StringTokenizer(line, "[ \t\n\r\f");
if (tok.hasMoreTokens()) {
String t = tok.nextToken();
// Understand only "Loading" from -XX:+TraceClassLoadingPreorder.
// This ignores old "Loaded" from -verbose:class to force correct
// classlist generation on Mustang.
if (t.equals("Loading")) {
t = tok.nextToken();
t = t.replace('.', '/');
// Check to make sure it came from the boot class path
if (tok.hasMoreTokens()) {
String tmp = tok.nextToken();
if (tmp.equals("from")) {
if (tok.hasMoreTokens()) {
tmp = tok.nextToken().toLowerCase();
// System.err.println("Loaded " + t + " from " + tmp);
if (tmp.startsWith(javaHome)) {
// OK, remember this class for later
classes.add(t);
}
}
}
}
}
}
}
} catch (IOException e) {
System.err.println("Error reading file " + args[i]);
throw(e);
}
}
Set<String> seenClasses = new HashSet<>();
for (String str : classes) {
if (seenClasses.add(str)) {
System.out.println(str);
}
}
// Try to complete certain packages
// Note: not using this new code yet; need to consider whether the
// footprint increase is worth any startup gains
// Note also that the packages considered below for completion are
// (obviously) platform-specific
// JarFile rtJar = new JarFile(origJavaHome + File.separator +
// "jre" + File.separator +
// "lib" + File.separator +
// "rt.jar");
// completePackage(seenClasses, rtJar, "java/awt");
// completePackage(seenClasses, rtJar, "sun/awt");
// completePackage(seenClasses, rtJar, "sun/awt/X11");
// completePackage(seenClasses, rtJar, "java/awt/im/spi");
// completePackage(seenClasses, rtJar, "java/lang");
}
private static void completePackage(Set<String> seenClasses,
JarFile jar,
String packageName) {
int len = packageName.length();
Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
JarEntry entry = entries.nextElement();
String name = entry.getName();
if (name.startsWith(packageName) &&
name.endsWith(".class") &&
name.lastIndexOf('/') == len) {
// Trim ".class" from end
name = name.substring(0, name.length() - 6);
if (seenClasses.add(name)) {
System.out.println(name);
}
}
}
}
}
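A small, hypothetical driver for the MakeClasslist tool above; the log file name and contents are illustrative. Given a log line such as
[Loading java.lang.String from /usr/java/j2sdk1.8.0/jre/lib/rt.jar]
with java.home pointing at /usr/java/j2sdk1.8.0/jre (the path used in the README above), the load path starts with the JRE prefix, so the class is accepted and printed as java/lang/String.
import build.tools.makeclasslist.MakeClasslist;
public class MakeClasslistExample {
    public static void main(String[] args) throws java.io.IOException {
        // Each argument is a -XX:+TraceClassLoadingPreorder log; the file name is hypothetical.
        MakeClasslist.main(new String[] { "results.startup3/results_1/log" });
    }
}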

View File

@ -0,0 +1,62 @@
/*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* This tool is used to help create the class list for class data sharing.
*
* The classlist is produced internally by first running a select number of
* startup benchmarks with the -XX:DumpLoadedClassList=<file> option, then
* running this tool in the following fashion to produce a complete classlist:
*
* jjs -scripting makeClasslist.js -- list1 list2 list3 > classlist.platform
*
* The lists should be listed in roughly smallest to largest order based on
* application size.
*
* After generating the classlist it's necessary to add a checksum (using
* AddJsum.java) before checking it into the workspace as the corresponding
* platform-specific classlist, such as make/data/classlist/classlist.linux
*/
"use strict";
var classlist = [];
var seenClasses = {};
for (var a in $ARG) {
var arg = $ARG[a];
var classes = readFully(arg).replace(/[\r\n]+/g, "\n").split("\n");
for (var c in classes) {
var clazz = classes[c];
if (clazz !== "" && seenClasses[clazz] === undefined) {
seenClasses[clazz] = clazz;
classlist.push(clazz);
}
}
}
for (c in classlist) {
print(classlist[c]);
}

View File

@ -44,14 +44,14 @@ import java.io.IOException;
* instance of a subclass of {@code ContentHandler}, and its
* {@code getContent} method is called to create the object.
* <p>
* If no content handler could be found, URLConnection will
* look for a content handler in a user-defineable set of places.
* If no content handler could be {@linkplain URLConnection#getContent() found},
* URLConnection will look for a content handler in a user-definable set of places.
* Users can define a vertical-bar delimited set of class prefixes
* to search through by defining the <i>java.content.handler.pkgs</i>
* to search through by defining the <i>{@value java.net.URLConnection#contentPathProp}</i>
* property. The class name must be of the form:
* <blockquote>
* <i>{package-prefix}.{major}.{minor}</i>
* <P>
* <p>
* where <i>{major}.{minor}</i> is formed by taking the
* content-type string, replacing all slash characters with a
* {@code period} ('.'), and all other non-alphanumeric characters
@ -82,6 +82,7 @@ import java.io.IOException;
* @since 1.0
*/
abstract public class ContentHandler {
/**
* Given a URL connect stream positioned at the beginning of the
* representation of an object, this method reads that stream and
@ -104,8 +105,8 @@ abstract public class ContentHandler {
* @param urlc a URL connection.
* @param classes an array of types requested
* @return the object read by the {@code ContentHandler} that is
* the first match of the suggested types.
* null if none of the requested are supported.
* the first match of the suggested types or
* {@code null} if none of the requested are supported.
* @exception IOException if an I/O error occurs while reading the object.
* @since 1.3
*/
@ -113,12 +114,11 @@ abstract public class ContentHandler {
public Object getContent(URLConnection urlc, Class[] classes) throws IOException {
Object obj = getContent(urlc);
for (int i = 0; i < classes.length; i++) {
if (classes[i].isInstance(obj)) {
for (Class<?> c : classes) {
if (c.isInstance(obj)) {
return obj;
}
}
}
return null;
}
}
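A minimal sketch of the content-handler class-name mapping described in the javadoc above; this is not the JDK's internal code, and the '_' substitution for other non-alphanumeric characters is an assumption (the description is truncated by the diff context). The default prefix sun.net.www.content is the one referenced in the URLConnection changes later in this commit.
public class ContentTypeNames {
    // Maps a MIME type onto the {package-prefix}.{major}.{minor} handler class name.
    static String contentHandlerClassName(String packagePrefix, String contentType) {
        StringBuilder name = new StringBuilder(packagePrefix).append('.');
        for (char ch : contentType.toCharArray()) {
            if (ch == '/') {
                name.append('.');                 // slash becomes the {major}.{minor} separator
            } else if (Character.isLetterOrDigit(ch)) {
                name.append(ch);
            } else {
                name.append('_');                 // assumed mapping for other characters
            }
        }
        return name.toString();
    }
    public static void main(String[] args) {
        // Prints: sun.net.www.content.text.html
        System.out.println(contentHandlerClassName("sun.net.www.content", "text/html"));
    }
}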

View File

@ -39,12 +39,13 @@ package java.net;
* @since 1.0
*/
public interface ContentHandlerFactory {
/**
* Creates a new {@code ContentHandler} to read an object from
* a {@code URLStreamHandler}.
*
* @param mimetype the MIME type for which a content handler is desired.
*
* @return a new {@code ContentHandler} to read an object from a
* {@code URLStreamHandler}.
* @see java.net.ContentHandler

View File

@ -28,8 +28,12 @@ package java.net;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.PrivilegedAction;
import java.util.Hashtable;
import java.util.Date;
import java.util.Iterator;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import java.util.StringTokenizer;
import java.util.Collections;
import java.util.Map;
@ -107,7 +111,7 @@ import sun.net.www.MessageHeader;
* <li>{@code getContentType}
* <li>{@code getDate}
* <li>{@code getExpiration}
* <li>{@code getLastModifed}
* <li>{@code getLastModified}
* </ul>
* <p>
* provide convenient access to these fields. The
@ -695,16 +699,30 @@ public abstract class URLConnection {
* This method first determines the content type of the object by
* calling the {@code getContentType} method. If this is
* the first time that the application has seen that specific content
* type, a content handler for that content type is created:
* type, a content handler for that content type is created.
* <p> This is done as follows:
* <ol>
* <li>If the application has set up a content handler factory instance
* using the {@code setContentHandlerFactory} method, the
* {@code createContentHandler} method of that instance is called
* with the content type as an argument; the result is a content
* handler for that content type.
* <li>If no content handler factory has yet been set up, or if the
* factory's {@code createContentHandler} method returns
* {@code null}, then this method tries to load a content handler
* <li>If no {@code ContentHandlerFactory} has yet been set up,
* or if the factory's {@code createContentHandler} method
* returns {@code null}, then the {@linkplain java.util.ServiceLoader
* ServiceLoader} mechanism is used to locate {@linkplain
* java.net.ContentHandlerFactory ContentHandlerFactory}
* implementations using the system class
* loader. The order that factories are located is implementation
* specific, and an implementation is free to cache the located
* factories. A {@linkplain java.util.ServiceConfigurationError
* ServiceConfigurationError}, {@code Error} or {@code RuntimeException}
* thrown from the {@code createContentHandler}, if encountered, will
* be propagated to the calling thread. The {@code
* createContentHandler} method of each factory, if instantiated, is
* invoked, with the content type, until a factory returns non-null,
* or all factories have been exhausted.
* <li>Failing that, this method tries to load a content handler
* class as defined by {@link java.net.ContentHandler ContentHandler}.
* If the class does not exist, or is not a subclass of {@code
* ContentHandler}, then an {@code UnknownServiceException} is thrown.
@ -855,8 +873,7 @@ public abstract class URLConnection {
* @see #getDoInput()
*/
public void setDoInput(boolean doinput) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
doInput = doinput;
}
@ -885,8 +902,7 @@ public abstract class URLConnection {
* @see #getDoOutput()
*/
public void setDoOutput(boolean dooutput) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
doOutput = dooutput;
}
@ -911,8 +927,7 @@ public abstract class URLConnection {
* @see #getAllowUserInteraction()
*/
public void setAllowUserInteraction(boolean allowuserinteraction) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
allowUserInteraction = allowuserinteraction;
}
@ -974,8 +989,7 @@ public abstract class URLConnection {
* @see #getUseCaches()
*/
public void setUseCaches(boolean usecaches) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
useCaches = usecaches;
}
@ -1000,8 +1014,7 @@ public abstract class URLConnection {
* @see #getIfModifiedSince()
*/
public void setIfModifiedSince(long ifmodifiedsince) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
ifModifiedSince = ifmodifiedsince;
}
@ -1055,12 +1068,11 @@ public abstract class URLConnection {
* (e.g., "{@code Accept}").
* @param value the value associated with it.
* @throws IllegalStateException if already connected
* @throws NullPointerException if key is <CODE>null</CODE>
* @throws NullPointerException if key is {@code null}
* @see #getRequestProperty(java.lang.String)
*/
public void setRequestProperty(String key, String value) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
if (key == null)
throw new NullPointerException ("key is null");
@ -1084,8 +1096,7 @@ public abstract class URLConnection {
* @since 1.4
*/
public void addRequestProperty(String key, String value) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
if (key == null)
throw new NullPointerException ("key is null");
@ -1107,8 +1118,7 @@ public abstract class URLConnection {
* @see #setRequestProperty(java.lang.String, java.lang.String)
*/
public String getRequestProperty(String key) {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
if (requests == null)
return null;
@ -1129,8 +1139,7 @@ public abstract class URLConnection {
* @since 1.4
*/
public Map<String,List<String>> getRequestProperties() {
if (connected)
throw new IllegalStateException("Already connected");
checkConnected();
if (requests == null)
return Collections.emptyMap();
@ -1183,7 +1192,7 @@ public abstract class URLConnection {
/**
* The ContentHandler factory.
*/
static ContentHandlerFactory factory;
private static volatile ContentHandlerFactory factory;
/**
* Sets the {@code ContentHandlerFactory} of an
@ -1216,37 +1225,45 @@ public abstract class URLConnection {
factory = fac;
}
private static Hashtable<String, ContentHandler> handlers = new Hashtable<>();
private static final Hashtable<String, ContentHandler> handlers = new Hashtable<>();
/**
* Gets the Content Handler appropriate for this connection.
*/
synchronized ContentHandler getContentHandler()
throws UnknownServiceException
{
private ContentHandler getContentHandler() throws UnknownServiceException {
String contentType = stripOffParameters(getContentType());
ContentHandler handler = null;
if (contentType == null)
if (contentType == null) {
throw new UnknownServiceException("no content-type");
try {
handler = handlers.get(contentType);
if (handler != null)
return handler;
} catch(Exception e) {
}
if (factory != null)
ContentHandler handler = handlers.get(contentType);
if (handler != null)
return handler;
if (factory != null) {
handler = factory.createContentHandler(contentType);
if (handler == null) {
try {
handler = lookupContentHandlerClassFor(contentType);
} catch(Exception e) {
e.printStackTrace();
handler = UnknownContentHandler.INSTANCE;
}
handlers.put(contentType, handler);
if (handler != null)
return handler;
}
return handler;
handler = lookupContentHandlerViaProvider(contentType);
if (handler != null) {
ContentHandler h = handlers.putIfAbsent(contentType, handler);
return h != null ? h : handler;
}
try {
handler = lookupContentHandlerClassFor(contentType);
} catch (Exception e) {
e.printStackTrace();
handler = UnknownContentHandler.INSTANCE;
}
assert handler != null;
ContentHandler h = handlers.putIfAbsent(contentType, handler);
return h != null ? h : handler;
}
/*
@ -1270,10 +1287,10 @@ public abstract class URLConnection {
private static final String contentPathProp = "java.content.handler.pkgs";
/**
* Looks for a content handler in a user-defineable set of places.
* By default it looks in sun.net.www.content, but users can define a
* vertical-bar delimited set of class prefixes to search through in
* addition by defining the java.content.handler.pkgs property.
* Looks for a content handler in a user-definable set of places.
* By default it looks in {@value #contentClassPrefix}, but users can define
* a vertical-bar delimited set of class prefixes to search through in
* addition by defining the {@value #contentPathProp} property.
* The class name must be of the form:
* <pre>
* {package-prefix}.{major}.{minor}
@ -1281,11 +1298,10 @@ public abstract class URLConnection {
* YoyoDyne.experimental.text.plain
* </pre>
*/
private ContentHandler lookupContentHandlerClassFor(String contentType)
throws InstantiationException, IllegalAccessException, ClassNotFoundException {
private ContentHandler lookupContentHandlerClassFor(String contentType) {
String contentHandlerClassName = typeToPackageName(contentType);
String contentHandlerPkgPrefixes =getContentHandlerPkgPrefixes();
String contentHandlerPkgPrefixes = getContentHandlerPkgPrefixes();
StringTokenizer packagePrefixIter =
new StringTokenizer(contentHandlerPkgPrefixes, "|");
@ -1305,17 +1321,46 @@ public abstract class URLConnection {
}
}
if (cls != null) {
ContentHandler handler =
(ContentHandler)cls.newInstance();
return handler;
return (ContentHandler) cls.newInstance();
}
} catch(Exception e) {
}
} catch(Exception ignored) { }
}
return UnknownContentHandler.INSTANCE;
}
private ContentHandler lookupContentHandlerViaProvider(String contentType) {
return AccessController.doPrivileged(
new PrivilegedAction<>() {
@Override
public ContentHandler run() {
ClassLoader cl = ClassLoader.getSystemClassLoader();
ServiceLoader<ContentHandlerFactory> sl =
ServiceLoader.load(ContentHandlerFactory.class, cl);
Iterator<ContentHandlerFactory> iterator = sl.iterator();
ContentHandler handler = null;
while (iterator.hasNext()) {
ContentHandlerFactory f;
try {
f = iterator.next();
} catch (ServiceConfigurationError e) {
if (e.getCause() instanceof SecurityException) {
continue;
}
throw e;
}
handler = f.createContentHandler(contentType);
if (handler != null) {
break;
}
}
return handler;
}
});
}
/**
* Utility function to map a MIME content type into an equivalent
* pair of class name components. For example: "text/html" would
@ -1345,8 +1390,8 @@ public abstract class URLConnection {
* Returns a vertical bar separated list of package prefixes for potential
* content handlers. Tries to get the java.content.handler.pkgs property
* to use as a set of package prefixes to search. Whether or not
* that property has been defined, the sun.net.www.content is always
* the last one on the returned package list.
* that property has been defined, the {@value #contentClassPrefix}
* is always the last one on the returned package list.
*/
private String getContentHandlerPkgPrefixes() {
String packagePrefixList = AccessController.doPrivileged(
@ -1764,9 +1809,12 @@ public abstract class URLConnection {
return skipped;
}
private void checkConnected() {
if (connected)
throw new IllegalStateException("Already connected");
}
}
class UnknownContentHandler extends ContentHandler {
static final ContentHandler INSTANCE = new UnknownContentHandler();
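For illustration, a hypothetical third-party factory that the ServiceLoader step added above (lookupContentHandlerViaProvider) could discover, assuming it were listed in a META-INF/services/java.net.ContentHandlerFactory provider-configuration file; the class name and the text/plain handling are illustrative and not part of this changeset.
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.ContentHandler;
import java.net.ContentHandlerFactory;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
public class PlainTextContentHandlerFactory implements ContentHandlerFactory {
    @Override
    public ContentHandler createContentHandler(String mimetype) {
        if (!"text/plain".equals(mimetype)) {
            return null;                 // null lets the remaining lookup steps run
        }
        return new ContentHandler() {
            @Override
            public Object getContent(URLConnection urlc) throws IOException {
                // Read the connection's stream and return its contents as a String.
                ByteArrayOutputStream buf = new ByteArrayOutputStream();
                try (InputStream in = urlc.getInputStream()) {
                    byte[] chunk = new byte[8192];
                    int n;
                    while ((n = in.read(chunk)) != -1) {
                        buf.write(chunk, 0, n);
                    }
                }
                return new String(buf.toByteArray(), StandardCharsets.UTF_8);
            }
        };
    }
}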

View File

@ -76,7 +76,7 @@ import sun.security.util.SecurityConstants;
public final class AccessControlContext {
private ProtectionDomain context[];
private ProtectionDomain[] context;
// isPrivileged and isAuthorized are referenced by the VM - do not remove
// or change their names
private boolean isPrivileged;
@ -89,13 +89,13 @@ public final class AccessControlContext {
private DomainCombiner combiner = null;
// limited privilege scope
private Permission permissions[];
private Permission[] permissions;
private AccessControlContext parent;
private boolean isWrapped;
// is constrained by limited privilege scope?
private boolean isLimited;
private ProtectionDomain limitedContext[];
private ProtectionDomain[] limitedContext;
private static boolean debugInit = false;
private static Debug debug = null;
@ -123,7 +123,7 @@ public final class AccessControlContext {
* changes to the array will not affect this AccessControlContext.
* @throws NullPointerException if {@code context} is {@code null}
*/
public AccessControlContext(ProtectionDomain context[])
public AccessControlContext(ProtectionDomain[] context)
{
if (context.length == 0) {
this.context = null;
@ -282,7 +282,7 @@ public final class AccessControlContext {
* package private constructor for AccessController.getContext()
*/
AccessControlContext(ProtectionDomain context[],
AccessControlContext(ProtectionDomain[] context,
boolean isPrivileged)
{
this.context = context;
@ -643,7 +643,7 @@ public final class AccessControlContext {
/*
* Combine the current (stack) and assigned domains.
*/
private static ProtectionDomain[] combine(ProtectionDomain[]current,
private static ProtectionDomain[] combine(ProtectionDomain[] current,
ProtectionDomain[] assigned) {
// current could be null if only system code is on the stack;
@ -666,7 +666,7 @@ public final class AccessControlContext {
int n = (skipAssigned) ? 0 : assigned.length;
// now we combine both of them, and create a new context
ProtectionDomain pd[] = new ProtectionDomain[slen + n];
ProtectionDomain[] pd = new ProtectionDomain[slen + n];
// first copy in the assigned context domains, no need to compress
if (!skipAssigned) {
@ -695,7 +695,7 @@ public final class AccessControlContext {
} else if (skipAssigned && n == slen) {
return current;
}
ProtectionDomain tmp[] = new ProtectionDomain[n];
ProtectionDomain[] tmp = new ProtectionDomain[n];
System.arraycopy(pd, 0, tmp, 0, n);
pd = tmp;
}

View File

@ -65,7 +65,7 @@ public class CodeSource implements java.io.Serializable {
/*
* The code signers. Certificate chains are concatenated.
*/
private transient java.security.cert.Certificate certs[] = null;
private transient java.security.cert.Certificate[] certs = null;
// cached SocketPermission used for matchLocation
private transient SocketPermission sp;
@ -91,7 +91,7 @@ public class CodeSource implements java.io.Serializable {
* @param certs the certificate(s). It may be null. The contents of the
* array are copied to protect against subsequent modification.
*/
public CodeSource(URL url, java.security.cert.Certificate certs[]) {
public CodeSource(URL url, java.security.cert.Certificate[] certs) {
this.location = url;
if (url != null) {
this.locationNoFragString = URLUtil.urlNoFragString(url);

View File

@ -289,9 +289,9 @@ implements Serializable
if (unresolvedPerms == null)
return null;
java.security.cert.Certificate certs[] = null;
java.security.cert.Certificate[] certs = null;
Object signers[] = p.getClass().getSigners();
Object[] signers = p.getClass().getSigners();
int n = 0;
if (signers != null) {

View File

@ -69,7 +69,7 @@ import sun.security.util.Debug;
*
* <pre>
* SecureRandom random = new SecureRandom();
* byte bytes[] = new byte[20];
* byte[] bytes = new byte[20];
* random.nextBytes(bytes);
* </pre>
*
@ -77,7 +77,7 @@ import sun.security.util.Debug;
* to generate a given number of seed bytes (to seed other random number
* generators, for example):
* <pre>
* byte seed[] = random.generateSeed(20);
* byte[] seed = random.generateSeed(20);
* </pre>
*
* Note: Depending on the implementation, the {@code generateSeed} and
@ -186,7 +186,7 @@ public class SecureRandom extends java.util.Random {
*
* @param seed the seed.
*/
public SecureRandom(byte seed[]) {
public SecureRandom(byte[] seed) {
super(0);
getDefaultPRNG(true, seed);
}
@ -486,7 +486,7 @@ public class SecureRandom extends java.util.Random {
@Override
final protected int next(int numBits) {
int numBytes = (numBits+7)/8;
byte b[] = new byte[numBytes];
byte[] b = new byte[numBytes];
int next = 0;
nextBytes(b);

View File

@ -130,7 +130,7 @@ implements java.io.Serializable
*/
private String actions;
private transient java.security.cert.Certificate certs[];
private transient java.security.cert.Certificate[] certs;
/**
* Creates a new UnresolvedPermission containing the permission
@ -152,7 +152,7 @@ implements java.io.Serializable
public UnresolvedPermission(String type,
String name,
String actions,
java.security.cert.Certificate certs[])
java.security.cert.Certificate[] certs)
{
super(type);
@ -224,7 +224,7 @@ implements java.io.Serializable
* try and resolve this permission using the class loader of the permission
* that was passed in.
*/
Permission resolve(Permission p, java.security.cert.Certificate certs[]) {
Permission resolve(Permission p, java.security.cert.Certificate[] certs) {
if (this.certs != null) {
// if p wasn't signed, we don't have a match
if (certs == null) {

View File

@ -54,7 +54,7 @@ public class RSAMultiPrimePrivateCrtKeySpec extends RSAPrivateKeySpec {
private final BigInteger primeExponentP;
private final BigInteger primeExponentQ;
private final BigInteger crtCoefficient;
private final RSAOtherPrimeInfo otherPrimeInfo[];
private final RSAOtherPrimeInfo[] otherPrimeInfo;
/**
* Creates a new {@code RSAMultiPrimePrivateCrtKeySpec}

View File

@ -178,7 +178,8 @@ public class ArrayList<E> extends AbstractList<E>
public ArrayList(Collection<? extends E> c) {
elementData = c.toArray();
if ((size = elementData.length) != 0) {
// c.toArray might (incorrectly) not return Object[] (see 6260652)
// defend against c.toArray (incorrectly) not returning Object[]
// (see e.g. https://bugs.openjdk.java.net/browse/JDK-6260652)
if (elementData.getClass() != Object[].class)
elementData = Arrays.copyOf(elementData, size, Object[].class);
} else {

View File

@ -3820,7 +3820,7 @@ public class Arrays {
@Override
public Object[] toArray() {
return a.clone();
return Arrays.copyOf(a, a.length, Object[].class);
}
@Override

View File

@ -174,7 +174,8 @@ public class Vector<E>
public Vector(Collection<? extends E> c) {
elementData = c.toArray();
elementCount = elementData.length;
// c.toArray might (incorrectly) not return Object[] (see 6260652)
// defend against c.toArray (incorrectly) not returning Object[]
// (see e.g. https://bugs.openjdk.java.net/browse/JDK-6260652)
if (elementData.getClass() != Object[].class)
elementData = Arrays.copyOf(elementData, elementCount, Object[].class);
}

View File

@ -134,7 +134,8 @@ public class CopyOnWriteArrayList<E>
elements = ((CopyOnWriteArrayList<?>)c).getArray();
else {
elements = c.toArray();
// c.toArray might (incorrectly) not return Object[] (see 6260652)
// defend against c.toArray (incorrectly) not returning Object[]
// (see e.g. https://bugs.openjdk.java.net/browse/JDK-6260652)
if (elements.getClass() != Object[].class)
elements = Arrays.copyOf(elements, elements.length, Object[].class);
}

View File

@ -489,15 +489,17 @@ abstract class AbstractPipeline<E_IN, E_OUT, S extends BaseStream<E_OUT, S>>
@Override
@SuppressWarnings("unchecked")
final <P_IN> void copyIntoWithCancel(Sink<P_IN> wrappedSink, Spliterator<P_IN> spliterator) {
final <P_IN> boolean copyIntoWithCancel(Sink<P_IN> wrappedSink, Spliterator<P_IN> spliterator) {
@SuppressWarnings({"rawtypes","unchecked"})
AbstractPipeline p = AbstractPipeline.this;
while (p.depth > 0) {
p = p.previousStage;
}
wrappedSink.begin(spliterator.getExactSizeIfKnown());
p.forEachWithCancel(spliterator, wrappedSink);
boolean cancelled = p.forEachWithCancel(spliterator, wrappedSink);
wrappedSink.end();
return cancelled;
}
@Override
@ -602,8 +604,9 @@ abstract class AbstractPipeline<E_IN, E_OUT, S extends BaseStream<E_OUT, S>>
*
* @param spliterator the spliterator to pull elements from
* @param sink the sink to push elements to
* @return true if the cancellation was requested
*/
abstract void forEachWithCancel(Spliterator<E_OUT> spliterator, Sink<E_OUT> sink);
abstract boolean forEachWithCancel(Spliterator<E_OUT> spliterator, Sink<E_OUT> sink);
/**
* Make a node builder compatible with this stream shape.

View File

@ -40,6 +40,7 @@ import java.util.function.DoubleToIntFunction;
import java.util.function.DoubleToLongFunction;
import java.util.function.DoubleUnaryOperator;
import java.util.function.IntFunction;
import java.util.function.LongPredicate;
import java.util.function.ObjDoubleConsumer;
import java.util.function.Supplier;
@ -153,10 +154,12 @@ abstract class DoublePipeline<E_IN>
}
@Override
final void forEachWithCancel(Spliterator<Double> spliterator, Sink<Double> sink) {
final boolean forEachWithCancel(Spliterator<Double> spliterator, Sink<Double> sink) {
Spliterator.OfDouble spl = adapt(spliterator);
DoubleConsumer adaptedSink = adapt(sink);
do { } while (!sink.cancellationRequested() && spl.tryAdvance(adaptedSink));
boolean cancelled;
do { } while (!(cancelled = sink.cancellationRequested()) && spl.tryAdvance(adaptedSink));
return cancelled;
}
@Override
@ -352,6 +355,16 @@ abstract class DoublePipeline<E_IN>
}
}
@Override
public final DoubleStream takeWhile(DoublePredicate predicate) {
return WhileOps.makeTakeWhileDouble(this, predicate);
}
@Override
public final DoubleStream dropWhile(DoublePredicate predicate) {
return WhileOps.makeDropWhileDouble(this, predicate);
}
@Override
public final DoubleStream sorted() {
return SortedOps.makeDouble(this);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -24,18 +24,13 @@
*/
package java.util.stream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import java.util.DoubleSummaryStatistics;
import java.util.Objects;
import java.util.OptionalDouble;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;
import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleConsumer;
@ -279,6 +274,137 @@ public interface DoubleStream extends BaseStream<Double, DoubleStream> {
*/
DoubleStream skip(long n);
/**
* Returns, if this stream is ordered, a stream consisting of the longest
* prefix of elements taken from this stream that match the given predicate.
* Otherwise returns, if this stream is unordered, a stream consisting of a
* subset of elements taken from this stream that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to take any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* takes all elements (the result is the same as the input), or if no
* elements of the stream match the given predicate then no elements are
* taken (the result is an empty stream).
*
* <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
* stateful intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code takeWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as
* {@link #generate(DoubleSupplier)}) or removing the ordering constraint
* with {@link #unordered()} may result in significant speedups of
* {@code takeWhile()} in parallel pipelines, if the semantics of your
* situation permit. If consistency with encounter order is required, and
* you are experiencing poor performance or memory utilization with
* {@code takeWhile()} in parallel pipelines, switching to sequential
* execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default DoubleStream takeWhile(DoublePredicate predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter order is present,
// is safe to use as long as it is configured not to split
return StreamSupport.doubleStream(
new WhileOps.UnorderedWhileSpliterator.OfDouble.Taking(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Returns, if this stream is ordered, a stream consisting of the remaining
* elements of this stream after dropping the longest prefix of elements
* that match the given predicate. Otherwise returns, if this stream is
* unordered, a stream consisting of the remaining elements of this stream
* after dropping a subset of elements that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to drop any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* drops all elements (the result is an empty stream), or if no elements of
* the stream match the given predicate then no elements are dropped (the
* result is the same as the input).
*
* <p>This is a <a href="package-summary.html#StreamOps">stateful
* intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code dropWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as
* {@link #generate(DoubleSupplier)}) or removing the ordering constraint
* with {@link #unordered()} may result in significant speedups of
* {@code dropWhile()} in parallel pipelines, if the semantics of your
* situation permit. If consistency with encounter order is required, and
* you are experiencing poor performance or memory utilization with
* {@code dropWhile()} in parallel pipelines, switching to sequential
* execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default DoubleStream dropWhile(DoublePredicate predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter order is present,
// is safe to use as long as it is configured not to split
return StreamSupport.doubleStream(
new WhileOps.UnorderedWhileSpliterator.OfDouble.Dropping(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Performs an action for each element of this stream.
*

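A small usage example for the two DoubleStream operations documented above, assuming a JDK that provides them (9 or later); the values are arbitrary:

    import java.util.stream.DoubleStream;

    public class DoubleWhileDemo {
        public static void main(String[] args) {
            // takeWhile keeps the longest matching prefix: prints 1.0 and 2.0
            DoubleStream.of(1.0, 2.0, 3.0, 1.5)
                        .takeWhile(d -> d < 3.0)
                        .forEach(System.out::println);

            // dropWhile discards that prefix and keeps the rest: prints 3.0 and 1.5
            DoubleStream.of(1.0, 2.0, 3.0, 1.5)
                        .dropWhile(d -> d < 3.0)
                        .forEach(System.out::println);
        }
    }
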
View File

@ -156,10 +156,12 @@ abstract class IntPipeline<E_IN>
}
@Override
final void forEachWithCancel(Spliterator<Integer> spliterator, Sink<Integer> sink) {
final boolean forEachWithCancel(Spliterator<Integer> spliterator, Sink<Integer> sink) {
Spliterator.OfInt spl = adapt(spliterator);
IntConsumer adaptedSink = adapt(sink);
do { } while (!sink.cancellationRequested() && spl.tryAdvance(adaptedSink));
boolean cancelled;
do { } while (!(cancelled = sink.cancellationRequested()) && spl.tryAdvance(adaptedSink));
return cancelled;
}
@Override
@ -386,6 +388,16 @@ abstract class IntPipeline<E_IN>
return SliceOps.makeInt(this, n, -1);
}
@Override
public final IntStream takeWhile(IntPredicate predicate) {
return WhileOps.makeTakeWhileInt(this, predicate);
}
@Override
public final IntStream dropWhile(IntPredicate predicate) {
return WhileOps.makeDropWhileInt(this, predicate);
}
@Override
public final IntStream sorted() {
return SortedOps.makeInt(this);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -272,6 +272,135 @@ public interface IntStream extends BaseStream<Integer, IntStream> {
*/
IntStream skip(long n);
/**
* Returns, if this stream is ordered, a stream consisting of the longest
* prefix of elements taken from this stream that match the given predicate.
* Otherwise returns, if this stream is unordered, a stream consisting of a
* subset of elements taken from this stream that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to take any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* takes all elements (the result is the same as the input), or if no
* elements of the stream match the given predicate then no elements are
* taken (the result is an empty stream).
*
* <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
* stateful intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code takeWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as {@link #generate(IntSupplier)})
* or removing the ordering constraint with {@link #unordered()} may result
* in significant speedups of {@code takeWhile()} in parallel pipelines, if
* the semantics of your situation permit. If consistency with encounter
* order is required, and you are experiencing poor performance or memory
* utilization with {@code takeWhile()} in parallel pipelines, switching to
* sequential execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default IntStream takeWhile(IntPredicate predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter order is present,
// is safe to use as long as it is configured not to split
return StreamSupport.intStream(
new WhileOps.UnorderedWhileSpliterator.OfInt.Taking(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Returns, if this stream is ordered, a stream consisting of the remaining
* elements of this stream after dropping the longest prefix of elements
* that match the given predicate. Otherwise returns, if this stream is
* unordered, a stream consisting of the remaining elements of this stream
* after dropping a subset of elements that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to drop any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* drops all elements (the result is an empty stream), or if no elements of
* the stream match the given predicate then no elements are dropped (the
* result is the same as the input).
*
* <p>This is a <a href="package-summary.html#StreamOps">stateful
* intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code dropWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as {@link #generate(IntSupplier)})
* or removing the ordering constraint with {@link #unordered()} may result
* in significant speedups of {@code dropWhile()} in parallel pipelines, if
* the semantics of your situation permit. If consistency with encounter
* order is required, and you are experiencing poor performance or memory
* utilization with {@code dropWhile()} in parallel pipelines, switching to
* sequential execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default IntStream dropWhile(IntPredicate predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter is present,
// is safe to use as long as it configured not to split
return StreamSupport.intStream(
new WhileOps.UnorderedWhileSpliterator.OfInt.Dropping(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Performs an action for each element of this stream.
*

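The IntStream flavor behaves the same way; a compact example (illustrative values, JDK 9+):

    import java.util.stream.IntStream;

    public class IntWhileDemo {
        public static void main(String[] args) {
            int[] values = {1, 2, 3, 4, 1};
            // Prints 1 2 3: the leading run of elements below 4.
            IntStream.of(values).takeWhile(i -> i < 4).forEach(System.out::println);
            // Prints 4 1: everything after that run, including the later 1.
            IntStream.of(values).dropWhile(i -> i < 4).forEach(System.out::println);
        }
    }
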
View File

@ -154,10 +154,12 @@ abstract class LongPipeline<E_IN>
}
@Override
final void forEachWithCancel(Spliterator<Long> spliterator, Sink<Long> sink) {
final boolean forEachWithCancel(Spliterator<Long> spliterator, Sink<Long> sink) {
Spliterator.OfLong spl = adapt(spliterator);
LongConsumer adaptedSink = adapt(sink);
do { } while (!sink.cancellationRequested() && spl.tryAdvance(adaptedSink));
boolean cancelled;
do { } while (!(cancelled = sink.cancellationRequested()) && spl.tryAdvance(adaptedSink));
return cancelled;
}
@Override
@ -367,6 +369,16 @@ abstract class LongPipeline<E_IN>
return SliceOps.makeLong(this, n, -1);
}
@Override
public final LongStream takeWhile(LongPredicate predicate) {
return WhileOps.makeTakeWhileLong(this, predicate);
}
@Override
public final LongStream dropWhile(LongPredicate predicate) {
return WhileOps.makeDropWhileLong(this, predicate);
}
@Override
public final LongStream sorted() {
return SortedOps.makeLong(this);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -24,11 +24,7 @@
*/
package java.util.stream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import java.util.LongSummaryStatistics;
import java.util.Objects;
import java.util.OptionalDouble;
@ -36,7 +32,6 @@ import java.util.OptionalLong;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.LongBinaryOperator;
@ -277,6 +272,137 @@ public interface LongStream extends BaseStream<Long, LongStream> {
*/
LongStream skip(long n);
/**
* Returns, if this stream is ordered, a stream consisting of the longest
* prefix of elements taken from this stream that match the given predicate.
* Otherwise returns, if this stream is unordered, a stream consisting of a
* subset of elements taken from this stream that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to take any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* takes all elements (the result is the same as the input), or if no
* elements of the stream match the given predicate then no elements are
* taken (the result is an empty stream).
*
* <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
* stateful intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code takeWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as
* {@link #generate(LongSupplier)}) or removing the ordering constraint with
* {@link #unordered()} may result in significant speedups of
* {@code takeWhile()} in parallel pipelines, if the semantics of your
* situation permit. If consistency with encounter order is required, and
* you are experiencing poor performance or memory utilization with
* {@code takeWhile()} in parallel pipelines, switching to sequential
* execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default LongStream takeWhile(LongPredicate predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter is present,
// is safe to use as long as it configured not to split
return StreamSupport.longStream(
new WhileOps.UnorderedWhileSpliterator.OfLong.Taking(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Returns, if this stream is ordered, a stream consisting of the remaining
* elements of this stream after dropping the longest prefix of elements
* that match the given predicate. Otherwise returns, if this stream is
* unordered, a stream consisting of the remaining elements of this stream
* after dropping a subset of elements that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to drop any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* drops all elements (the result is an empty stream), or if no elements of
* the stream match the given predicate then no elements are dropped (the
* result is the same as the input).
*
* <p>This is a <a href="package-summary.html#StreamOps">stateful
* intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code dropWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as
* {@link #generate(LongSupplier)}) or removing the ordering constraint with
* {@link #unordered()} may result in significant speedups of
* {@code dropWhile()} in parallel pipelines, if the semantics of your
* situation permit. If consistency with encounter order is required, and
* you are experiencing poor performance or memory utilization with
* {@code dropWhile()} in parallel pipelines, switching to sequential
* execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default LongStream dropWhile(LongPredicate predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter is present,
// is safe to use as long as it configured not to split
return StreamSupport.longStream(
new WhileOps.UnorderedWhileSpliterator.OfLong.Dropping(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Performs an action for each element of this stream.
*

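For LongStream, takeWhile is particularly useful with infinite ordered sources, since it is the short-circuiting step that makes the pipeline finite. A sketch (JDK 9+, values illustrative):

    import java.util.stream.LongStream;

    public class LongWhileDemo {
        public static void main(String[] args) {
            // iterate yields 1, 2, 4, 8, ...; takeWhile stops at the first value
            // >= 100, so the otherwise infinite stream prints 1 2 4 8 16 32 64.
            LongStream.iterate(1L, x -> x * 2)
                      .takeWhile(x -> x < 100)
                      .forEach(System.out::println);
        }
    }
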
View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -125,7 +125,11 @@ interface Node<T> {
Node.Builder<T> nodeBuilder = Nodes.builder(size, generator);
nodeBuilder.begin(size);
for (int i = 0; i < from && spliterator.tryAdvance(e -> { }); i++) { }
for (int i = 0; (i < size) && spliterator.tryAdvance(nodeBuilder); i++) { }
if (to == count()) {
spliterator.forEachRemaining(nodeBuilder);
} else {
for (int i = 0; i < size && spliterator.tryAdvance(nodeBuilder); i++) { }
}
nodeBuilder.end();
return nodeBuilder.build();
}
@ -360,7 +364,11 @@ interface Node<T> {
Node.Builder.OfInt nodeBuilder = Nodes.intBuilder(size);
nodeBuilder.begin(size);
for (int i = 0; i < from && spliterator.tryAdvance((IntConsumer) e -> { }); i++) { }
for (int i = 0; (i < size) && spliterator.tryAdvance((IntConsumer) nodeBuilder); i++) { }
if (to == count()) {
spliterator.forEachRemaining((IntConsumer) nodeBuilder);
} else {
for (int i = 0; i < size && spliterator.tryAdvance((IntConsumer) nodeBuilder); i++) { }
}
nodeBuilder.end();
return nodeBuilder.build();
}
@ -433,7 +441,11 @@ interface Node<T> {
Node.Builder.OfLong nodeBuilder = Nodes.longBuilder(size);
nodeBuilder.begin(size);
for (int i = 0; i < from && spliterator.tryAdvance((LongConsumer) e -> { }); i++) { }
for (int i = 0; (i < size) && spliterator.tryAdvance((LongConsumer) nodeBuilder); i++) { }
if (to == count()) {
spliterator.forEachRemaining((LongConsumer) nodeBuilder);
} else {
for (int i = 0; i < size && spliterator.tryAdvance((LongConsumer) nodeBuilder); i++) { }
}
nodeBuilder.end();
return nodeBuilder.build();
}
@ -508,7 +520,11 @@ interface Node<T> {
Node.Builder.OfDouble nodeBuilder = Nodes.doubleBuilder(size);
nodeBuilder.begin(size);
for (int i = 0; i < from && spliterator.tryAdvance((DoubleConsumer) e -> { }); i++) { }
for (int i = 0; (i < size) && spliterator.tryAdvance((DoubleConsumer) nodeBuilder); i++) { }
if (to == count()) {
spliterator.forEachRemaining((DoubleConsumer) nodeBuilder);
} else {
for (int i = 0; i < size && spliterator.tryAdvance((DoubleConsumer) nodeBuilder); i++) { }
}
nodeBuilder.end();
return nodeBuilder.build();
}

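Each Node.truncate hunk above applies the same optimization: when the requested range runs to the end of the node (to == count()), the remaining elements can be bulk-pushed with forEachRemaining instead of a counted tryAdvance loop. A shape-independent sketch, with plain Spliterator/Consumer standing in for the package-private Node.Builder (the names and the total parameter are illustrative):

    import java.util.Spliterator;
    import java.util.function.Consumer;

    final class TruncateSketch {
        // Skips `from` elements, then feeds elements [from, to) into sink;
        // `total` plays the role of Node.count() in the real code.
        static <T> void truncateInto(Spliterator<T> spliterator, long from, long to,
                                     long total, Consumer<? super T> sink) {
            long size = to - from;
            for (long i = 0; i < from && spliterator.tryAdvance(e -> { }); i++) { }
            if (to == total) {
                spliterator.forEachRemaining(sink); // fast path: take everything left
            } else {
                for (long i = 0; i < size && spliterator.tryAdvance(sink); i++) { }
            }
        }
    }
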
View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -69,6 +69,14 @@ final class Nodes {
private static final Node.OfLong EMPTY_LONG_NODE = new EmptyNode.OfLong();
private static final Node.OfDouble EMPTY_DOUBLE_NODE = new EmptyNode.OfDouble();
/**
* @return an array generator for an array whose elements are of type T.
*/
@SuppressWarnings("unchecked")
static <T> IntFunction<T[]> castingArray() {
return size -> (T[]) new Object[size];
}
// General shape-based node creation methods
/**

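castingArray(), moved here from SliceOps (see the SliceOps hunk below), presumably so other ops added in this change can share it, is simply an IntFunction<T[]> that allocates Object[] storage and unchecked-casts it; that is safe only while the array stays behind generically typed references. A standalone equivalent with an illustrative caller (none of these names come from the patch):

    import java.util.Arrays;
    import java.util.List;
    import java.util.function.IntFunction;

    final class CastingArrayDemo {
        // Generator that allocates Object[] storage typed as T[]. The unchecked
        // cast is safe only while the array never escapes to a concrete T[] variable.
        @SuppressWarnings("unchecked")
        static <T> IntFunction<T[]> castingArray() {
            return size -> (T[]) new Object[size];
        }

        // Generic code can fill and read the array without naming a concrete T[].
        static <T> T[] copyToArray(List<T> src, IntFunction<T[]> generator) {
            T[] out = generator.apply(src.size());
            for (int i = 0; i < src.size(); i++) {
                out[i] = src.get(i);
            }
            return out;
        }

        public static void main(String[] args) {
            Object[] copy = copyToArray(Arrays.asList("a", "b"), castingArray());
            System.out.println(Arrays.toString(copy)); // [a, b]
        }
    }
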
View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -136,8 +136,9 @@ abstract class PipelineHelper<P_OUT> {
*
* @param wrappedSink the destination {@code Sink}
* @param spliterator the source {@code Spliterator}
* @return true if the cancellation was requested
*/
abstract <P_IN> void copyIntoWithCancel(Sink<P_IN> wrappedSink, Spliterator<P_IN> spliterator);
abstract <P_IN> boolean copyIntoWithCancel(Sink<P_IN> wrappedSink, Spliterator<P_IN> spliterator);
/**
* Takes a {@code Sink} that accepts elements of the output type of the

View File

@ -122,8 +122,10 @@ abstract class ReferencePipeline<P_IN, P_OUT>
}
@Override
final void forEachWithCancel(Spliterator<P_OUT> spliterator, Sink<P_OUT> sink) {
do { } while (!sink.cancellationRequested() && spliterator.tryAdvance(sink));
final boolean forEachWithCancel(Spliterator<P_OUT> spliterator, Sink<P_OUT> sink) {
boolean cancelled;
do { } while (!(cancelled = sink.cancellationRequested()) && spliterator.tryAdvance(sink));
return cancelled;
}
@Override
@ -411,6 +413,16 @@ abstract class ReferencePipeline<P_IN, P_OUT>
return SliceOps.makeRef(this, n, -1);
}
@Override
public final Stream<P_OUT> takeWhile(Predicate<? super P_OUT> predicate) {
return WhileOps.makeTakeWhileRef(this, predicate);
}
@Override
public final Stream<P_OUT> dropWhile(Predicate<? super P_OUT> predicate) {
return WhileOps.makeDropWhileRef(this, predicate);
}
// Terminal operations from Stream
@Override

View File

@ -96,11 +96,6 @@ final class SliceOps {
}
}
@SuppressWarnings("unchecked")
private static <T> IntFunction<T[]> castingArray() {
return size -> (T[]) new Object[size];
}
/**
* Appends a "slice" operation to the provided stream. The slice operation
* may be skip-only, limit-only, or skip-and-limit.
@ -151,7 +146,7 @@ final class SliceOps {
// cancellation will be more aggressive cancelling later tasks
// if the target slice size has been reached from a given task,
// cancellation should also clear local results if any
return new SliceTask<>(this, helper, spliterator, castingArray(), skip, limit).
return new SliceTask<>(this, helper, spliterator, Nodes.castingArray(), skip, limit).
invoke().spliterator();
}
}

View File

@ -24,7 +24,6 @@
*/
package java.util.stream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
@ -480,6 +479,135 @@ public interface Stream<T> extends BaseStream<T, Stream<T>> {
*/
Stream<T> skip(long n);
/**
* Returns, if this stream is ordered, a stream consisting of the longest
* prefix of elements taken from this stream that match the given predicate.
* Otherwise returns, if this stream is unordered, a stream consisting of a
* subset of elements taken from this stream that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to take any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* takes all elements (the result is the same as the input), or if no
* elements of the stream match the given predicate then no elements are
* taken (the result is an empty stream).
*
* <p>This is a <a href="package-summary.html#StreamOps">short-circuiting
* stateful intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code takeWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as {@link #generate(Supplier)}) or
* removing the ordering constraint with {@link #unordered()} may result in
* significant speedups of {@code takeWhile()} in parallel pipelines, if the
* semantics of your situation permit. If consistency with encounter order
* is required, and you are experiencing poor performance or memory
* utilization with {@code takeWhile()} in parallel pipelines, switching to
* sequential execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default Stream<T> takeWhile(Predicate<? super T> predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter order is present,
// is safe to use as long as it is configured not to split
return StreamSupport.stream(
new WhileOps.UnorderedWhileSpliterator.OfRef.Taking<>(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Returns, if this stream is ordered, a stream consisting of the remaining
* elements of this stream after dropping the longest prefix of elements
* that match the given predicate. Otherwise returns, if this stream is
* unordered, a stream consisting of the remaining elements of this stream
* after dropping a subset of elements that match the given predicate.
*
* <p>If this stream is ordered then the longest prefix is a contiguous
* sequence of elements of this stream that match the given predicate. The
* first element of the sequence is the first element of this stream, and
* the element immediately following the last element of the sequence does
* not match the given predicate.
*
* <p>If this stream is unordered, and some (but not all) elements of this
* stream match the given predicate, then the behavior of this operation is
* nondeterministic; it is free to drop any subset of matching elements
* (which includes the empty set).
*
* <p>Independent of whether this stream is ordered or unordered if all
* elements of this stream match the given predicate then this operation
* drops all elements (the result is an empty stream), or if no elements of
* the stream match the given predicate then no elements are dropped (the
* result is the same as the input).
*
* <p>This is a <a href="package-summary.html#StreamOps">stateful
* intermediate operation</a>.
*
* @implSpec
* The default implementation obtains the {@link #spliterator() spliterator}
* of this stream, wraps that spliterator so as to support the semantics
* of this operation on traversal, and returns a new stream associated with
* the wrapped spliterator. The returned stream preserves the execution
* characteristics of this stream (namely parallel or sequential execution
* as per {@link #isParallel()}) but the wrapped spliterator may choose to
* not support splitting. When the returned stream is closed, the close
* handlers for both the returned and this stream are invoked.
*
* @apiNote
* While {@code dropWhile()} is generally a cheap operation on sequential
* stream pipelines, it can be quite expensive on ordered parallel
* pipelines, since the operation is constrained to return not just any
* valid prefix, but the longest prefix of elements in the encounter order.
* Using an unordered stream source (such as {@link #generate(Supplier)}) or
* removing the ordering constraint with {@link #unordered()} may result in
* significant speedups of {@code dropWhile()} in parallel pipelines, if the
* semantics of your situation permit. If consistency with encounter order
* is required, and you are experiencing poor performance or memory
* utilization with {@code dropWhile()} in parallel pipelines, switching to
* sequential execution with {@link #sequential()} may improve performance.
*
* @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>,
* <a href="package-summary.html#Statelessness">stateless</a>
* predicate to apply to elements to determine the longest
* prefix of elements.
* @return the new stream
*/
default Stream<T> dropWhile(Predicate<? super T> predicate) {
Objects.requireNonNull(predicate);
// Reuses the unordered spliterator, which, when encounter order is present,
// is safe to use as long as it is configured not to split
return StreamSupport.stream(
new WhileOps.UnorderedWhileSpliterator.OfRef.Dropping<>(spliterator(), true, predicate),
isParallel()).onClose(this::close);
}
/**
* Performs an action for each element of this stream.
*

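And the reference-stream flavor documented above; the second pipeline illustrates that dropWhile only removes the leading run, not every matching element (JDK 9+, illustrative values):

    import java.util.stream.Stream;

    public class StreamWhileDemo {
        public static void main(String[] args) {
            // Prints "a" and "ab": the prefix of strings shorter than three chars.
            Stream.of("a", "ab", "abc", "b")
                  .takeWhile(s -> s.length() < 3)
                  .forEach(System.out::println);

            // Prints "abc" and "b": later short strings survive because only the
            // leading matching run is dropped.
            Stream.of("a", "ab", "abc", "b")
                  .dropWhile(s -> s.length() < 3)
                  .forEach(System.out::println);
        }
    }
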
File diff suppressed because it is too large

View File

@ -156,8 +156,10 @@ public class SSLEngineResult {
* This value is used to indicate that not-yet-interpreted data
* has been previously received from the remote side, and does
* not need to be received again.
* <P>
* This handshake status only applies to DTLS.
*
* @since 1.9
* @since 9
*/
NEED_UNWRAP_AGAIN;
}
@ -219,7 +221,7 @@ public class SSLEngineResult {
* arguments are null, or if {@code bytesConsumed} or
* {@code bytesProduced} is negative
*
* @since 1.9
* @since 9
*/
public SSLEngineResult(Status status, HandshakeStatus handshakeStatus,
int bytesConsumed, int bytesProduced, long sequenceNumber) {
@ -302,7 +304,7 @@ public class SSLEngineResult {
*
* @see java.lang.Long#compareUnsigned(long, long)
*
* @since 1.9
* @since 9
*/
final public long sequenceNumber() {
return sequenceNumber;

View File

@ -507,7 +507,7 @@ public class PKCS7 {
// certificates (optional)
if (certificates != null && certificates.length != 0) {
// cast to X509CertImpl[] since X509CertImpl implements DerEncoder
X509CertImpl implCerts[] = new X509CertImpl[certificates.length];
X509CertImpl[] implCerts = new X509CertImpl[certificates.length];
for (int i = 0; i < certificates.length; i++) {
if (certificates[i] instanceof X509CertImpl)
implCerts[i] = (X509CertImpl) certificates[i];

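This hunk and most of the remaining security and SSL hunks below are a mechanical cleanup from C-style to Java-style array declarations; the two forms are equivalent to the compiler, e.g. (illustrative class):

    class ArrayDeclStyle {
        byte legacy[] = new byte[16];    // C-style bracket placement, still legal
        byte[] preferred = new byte[16]; // Java-style, as used in the updated files
    }
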
View File

@ -78,7 +78,7 @@ public class PKCS8Key implements PrivateKey {
* data is stored and transmitted losslessly, but no knowledge
* about this particular algorithm is available.
*/
private PKCS8Key (AlgorithmId algid, byte key [])
private PKCS8Key (AlgorithmId algid, byte[] key)
throws InvalidKeyException {
this.algid = algid;
this.key = key;

View File

@ -154,28 +154,28 @@ public final class PKCS12KeyStore extends KeyStoreSpi {
private static final Debug debug = Debug.getInstance("pkcs12");
private static final int keyBag[] = {1, 2, 840, 113549, 1, 12, 10, 1, 2};
private static final int certBag[] = {1, 2, 840, 113549, 1, 12, 10, 1, 3};
private static final int secretBag[] = {1, 2, 840, 113549, 1, 12, 10, 1, 5};
private static final int[] keyBag = {1, 2, 840, 113549, 1, 12, 10, 1, 2};
private static final int[] certBag = {1, 2, 840, 113549, 1, 12, 10, 1, 3};
private static final int[] secretBag = {1, 2, 840, 113549, 1, 12, 10, 1, 5};
private static final int pkcs9Name[] = {1, 2, 840, 113549, 1, 9, 20};
private static final int pkcs9KeyId[] = {1, 2, 840, 113549, 1, 9, 21};
private static final int[] pkcs9Name = {1, 2, 840, 113549, 1, 9, 20};
private static final int[] pkcs9KeyId = {1, 2, 840, 113549, 1, 9, 21};
private static final int pkcs9certType[] = {1, 2, 840, 113549, 1, 9, 22, 1};
private static final int[] pkcs9certType = {1, 2, 840, 113549, 1, 9, 22, 1};
private static final int pbeWithSHAAnd40BitRC2CBC[] =
private static final int[] pbeWithSHAAnd40BitRC2CBC =
{1, 2, 840, 113549, 1, 12, 1, 6};
private static final int pbeWithSHAAnd3KeyTripleDESCBC[] =
private static final int[] pbeWithSHAAnd3KeyTripleDESCBC =
{1, 2, 840, 113549, 1, 12, 1, 3};
private static final int pbes2[] = {1, 2, 840, 113549, 1, 5, 13};
private static final int[] pbes2 = {1, 2, 840, 113549, 1, 5, 13};
// TODO: temporary Oracle OID
/*
* { joint-iso-itu-t(2) country(16) us(840) organization(1) oracle(113894)
* jdk(746875) crypto(1) id-at-trustedKeyUsage(1) }
*/
private static final int TrustedKeyUsage[] =
private static final int[] TrustedKeyUsage =
{2, 16, 840, 1, 113894, 746875, 1, 1};
private static final int AnyExtendedKeyUsage[] = {2, 5, 29, 37, 0};
private static final int[] AnyExtendedKeyUsage = {2, 5, 29, 37, 0};
private static ObjectIdentifier PKCS8ShroudedKeyBag_OID;
private static ObjectIdentifier CertBag_OID;
@ -243,7 +243,7 @@ public final class PKCS12KeyStore extends KeyStoreSpi {
// A private key entry and its supporting certificate chain
private static class PrivateKeyEntry extends KeyEntry {
byte[] protectedPrivKey;
Certificate chain[];
Certificate[] chain;
};
// A secret key

View File

@ -403,7 +403,7 @@ public class AuthPolicyFile extends javax.security.auth.Policy {
debug.println(" "+perm);
}
} catch (ClassNotFoundException cnfe) {
Certificate certs[];
Certificate[] certs;
if (pe.signedBy != null) {
certs = getCertificates(keyStore, pe.signedBy);
} else {
@ -623,7 +623,7 @@ public class AuthPolicyFile extends javax.security.auth.Policy {
init();
}
final CodeSource codesource[] = {null};
final CodeSource[] codesource = {null};
codesource[0] = canonicalizeCodebase(cs, true);
@ -666,7 +666,7 @@ public class AuthPolicyFile extends javax.security.auth.Policy {
// now see if any of the keys are trusted ids.
if (!ignoreIdentityScope) {
Certificate certs[] = codesource[0].getCertificates();
Certificate[] certs = codesource[0].getCertificates();
if (certs != null) {
for (int k=0; k < certs.length; k++) {
if (aliasMapping.get(certs[k]) == null &&

View File

@ -237,7 +237,7 @@ public class DSAParameterGenerator extends AlgorithmParameterGeneratorSpi {
BigInteger offset = ONE;
/* Step 11 */
for (counter = 0; counter < 4*valueL; counter++) {
BigInteger V[] = new BigInteger[n + 1];
BigInteger[] V = new BigInteger[n + 1];
/* Step 11.1 */
for (int j = 0; j <= n; j++) {
BigInteger J = BigInteger.valueOf(j);

View File

@ -82,7 +82,7 @@ public abstract class JavaKeyStore extends KeyStoreSpi {
private static class KeyEntry {
Date date; // the creation date of this entry
byte[] protectedPrivKey;
Certificate chain[];
Certificate[] chain;
};
// Trusted certificates
@ -604,7 +604,7 @@ public abstract class JavaKeyStore extends KeyStoreSpi {
* the keystore (such as deleting or modifying key or
* certificate entries).
*/
byte digest[] = md.digest();
byte[] digest = md.digest();
dos.write(digest);
dos.flush();
@ -770,9 +770,8 @@ public abstract class JavaKeyStore extends KeyStoreSpi {
* with
*/
if (password != null) {
byte computed[], actual[];
computed = md.digest();
actual = new byte[computed.length];
byte[] computed = md.digest();
byte[] actual = new byte[computed.length];
dis.readFully(actual);
for (int i = 0; i < computed.length; i++) {
if (computed[i] != actual[i]) {

View File

@ -795,7 +795,7 @@ public class PolicyFile extends java.security.Policy {
// an unresolved permission which will be resolved
// when implies is called
// Add it to entry
Certificate certs[];
Certificate[] certs;
if (pe.signedBy != null) {
certs = getCertificates(keyStore,
pe.signedBy,
@ -817,7 +817,7 @@ public class PolicyFile extends java.security.Policy {
debug.println(" "+perm);
}
} catch (ClassNotFoundException cnfe) {
Certificate certs[];
Certificate[] certs;
if (pe.signedBy != null) {
certs = getCertificates(keyStore,
pe.signedBy,
@ -2032,7 +2032,7 @@ public class PolicyFile extends java.security.Policy {
*
* @serial
*/
private Certificate certs[];
private Certificate[] certs;
/**
* Creates a new SelfPermission containing the permission
@ -2048,7 +2048,7 @@ public class PolicyFile extends java.security.Policy {
* certificate first and the (root) certificate authority last).
*/
public SelfPermission(String type, String name, String actions,
Certificate certs[])
Certificate[] certs)
{
super(type);
if (type == null) {

View File

@ -1353,7 +1353,7 @@ public class PolicyParser {
}
}
public static void main(String arg[]) throws Exception {
public static void main(String[] arg) throws Exception {
try (FileReader fr = new FileReader(arg[0]);
FileWriter fw = new FileWriter(arg[1])) {
PolicyParser pp = new PolicyParser(true);

View File

@ -85,7 +85,7 @@ implements java.io.Serializable {
*
* @param seed the seed.
*/
private SecureRandom(byte seed[]) {
private SecureRandom(byte[] seed) {
init(seed);
}

View File

@ -70,7 +70,7 @@ class ByteBufferInputStream extends InputStream {
* Increments position().
*/
@Override
public int read(byte b[]) throws IOException {
public int read(byte[] b) throws IOException {
if (bb == null) {
throw new IOException("read on a closed InputStream");
@ -85,7 +85,7 @@ class ByteBufferInputStream extends InputStream {
* Increments position().
*/
@Override
public int read(byte b[], int off, int len) throws IOException {
public int read(byte[] b, int off, int len) throws IOException {
if (bb == null) {
throw new IOException("read on a closed InputStream");

View File

@ -810,7 +810,7 @@ final class ClientHandshaker extends Handshaker {
String alias = null;
int keytypesTmpSize = keytypesTmp.size();
if (keytypesTmpSize != 0) {
String keytypes[] =
String[] keytypes =
keytypesTmp.toArray(new String[keytypesTmpSize]);
if (conn != null) {

View File

@ -48,7 +48,7 @@ final class DHClientKeyExchange extends HandshakeMessage {
* This value may be empty if it was included in the
* client's certificate ...
*/
private byte dh_Yc[]; // 1 to 2^16 -1 bytes
private byte[] dh_Yc; // 1 to 2^16 -1 bytes
BigInteger getClientPublicKey() {
return dh_Yc == null ? null : new BigInteger(1, dh_Yc);

View File

@ -146,7 +146,7 @@ public final class HandshakeInStream extends ByteArrayInputStream {
byte[] getBytes8() throws IOException {
int len = getInt8();
verifyLength(len);
byte b[] = new byte[len];
byte[] b = new byte[len];
read(b);
return b;
@ -155,7 +155,7 @@ public final class HandshakeInStream extends ByteArrayInputStream {
public byte[] getBytes16() throws IOException {
int len = getInt16();
verifyLength(len);
byte b[] = new byte[len];
byte[] b = new byte[len];
read(b);
return b;
@ -164,7 +164,7 @@ public final class HandshakeInStream extends ByteArrayInputStream {
byte[] getBytes24() throws IOException {
int len = getInt24();
verifyLength(len);
byte b[] = new byte[len];
byte[] b = new byte[len];
read(b);
return b;

Some files were not shown because too many files have changed in this diff