8284780: Need methods to create pre-sized HashSet and LinkedHashSet

Reviewed-by: naoto, bpb, dfuchs, ascarpino
XenoAmess 2022-06-09 01:50:54 +00:00 committed by Stuart Marks
parent a941bc2de6
commit e01cd7c3ed
29 changed files with 147 additions and 47 deletions
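
The point of the new factory methods: new HashSet<>(n) treats n as the initial capacity of the backing HashMap, not as an expected element count, so with the default load factor of 0.75 adding n elements can still trigger a resize once the size passes capacity * 0.75. HashSet.newHashSet(n) and LinkedHashSet.newLinkedHashSet(n) instead size the backing map so that n elements fit without rehashing. A minimal before/after sketch of the idiom this commit applies across the JDK (class and method names here are illustrative, not part of the patch):

import java.util.HashSet;
import java.util.Set;

class PreSizedSetSketch {

    // Old idiom: the argument is a raw initial capacity. For example,
    // 16 insertions into new HashSet<>(16) exceed the resize threshold
    // (16 * 0.75 = 12) and force one rehash.
    static Set<String> oldStyle(String[] elements) {
        Set<String> set = new HashSet<>(elements.length);
        for (String e : elements) {
            set.add(e);
        }
        return set;
    }

    // New idiom introduced by this change: the argument is the expected
    // number of elements; the backing map is sized to roughly
    // ceil(elements.length / 0.75), so the adds never trigger a resize.
    static Set<String> newStyle(String[] elements) {
        Set<String> set = HashSet.newHashSet(elements.length);
        for (String e : elements) {
            set.add(e);
        }
        return set;
    }
}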

@@ -56,7 +56,7 @@ abstract class PBEKeyFactory extends SecretKeyFactorySpi {
}
static {
validTypes = new HashSet<>(17);
validTypes = HashSet.newHashSet(17);
validTypes.add("PBEWithMD5AndDES".toUpperCase(Locale.ENGLISH));
validTypes.add("PBEWithSHA1AndDESede".toUpperCase(Locale.ENGLISH));
validTypes.add("PBEWithSHA1AndRC2_40".toUpperCase(Locale.ENGLISH));

@@ -1648,7 +1648,7 @@ public final class ObjectStreamClass implements Serializable {
ObjectStreamField[] boundFields =
new ObjectStreamField[serialPersistentFields.length];
Set<String> fieldNames = new HashSet<>(serialPersistentFields.length);
Set<String> fieldNames = HashSet.newHashSet(serialPersistentFields.length);
for (int i = 0; i < serialPersistentFields.length; i++) {
ObjectStreamField spf = serialPersistentFields[i];

@@ -310,7 +310,7 @@ import static jdk.internal.org.objectweb.asm.Opcodes.*;
interfaceNames = new String[]{interfaceName};
} else {
// Assure no duplicate interfaces (ClassFormatError)
Set<String> itfs = new LinkedHashSet<>(altInterfaces.length + 1);
Set<String> itfs = LinkedHashSet.newLinkedHashSet(altInterfaces.length + 1);
itfs.add(interfaceName);
for (Class<?> i : altInterfaces) {
itfs.add(i.getName().replace('.', '/'));

@@ -718,7 +718,7 @@ public abstract class Charset
if (aliasSet != null)
return aliasSet;
int n = aliases.length;
HashSet<String> hs = new HashSet<>(n);
HashSet<String> hs = HashSet.newHashSet(n);
for (int i = 0; i < n; i++)
hs.add(aliases[i]);
aliasSet = Collections.unmodifiableSet(hs);

@@ -123,7 +123,7 @@ public final class DecimalStyle {
*/
public static Set<Locale> getAvailableLocales() {
Locale[] l = DecimalFormatSymbols.getAvailableLocales();
Set<Locale> locales = new HashSet<>(l.length);
Set<Locale> locales = HashSet.newHashSet(l.length);
Collections.addAll(locales, l);
return locales;
}

@@ -2640,14 +2640,7 @@ public abstract class Calendar implements Serializable, Cloneable, Comparable<Ca
}
private static class AvailableCalendarTypes {
private static final Set<String> SET;
static {
Set<String> set = new HashSet<>(3);
set.add("gregory");
set.add("buddhist");
set.add("japanese");
SET = Collections.unmodifiableSet(set);
}
private static final Set<String> SET = Set.of("gregory", "buddhist", "japanese");
private AvailableCalendarTypes() {
}
}
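
The Calendar hunk goes a step further than pre-sizing: the set of calendar types is a small immutable constant, so the eagerly built HashSet wrapped in Collections.unmodifiableSet is replaced by Set.of, which returns an unmodifiable set directly. A short comparison (illustrative code, not part of the patch); note that Set.of rejects null elements and duplicates at construction and does not guarantee iteration order:

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

class CalendarTypesSketch {

    // Before: build a mutable set, then wrap it.
    static final Set<String> BEFORE;
    static {
        Set<String> set = new HashSet<>(3);
        set.add("gregory");
        set.add("buddhist");
        set.add("japanese");
        BEFORE = Collections.unmodifiableSet(set);
    }

    // After: one expression, no separate wrapper object.
    static final Set<String> AFTER = Set.of("gregory", "buddhist", "japanese");
}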

@@ -125,6 +125,10 @@ public class HashSet<E>
* Constructs a new, empty set; the backing {@code HashMap} instance has
* the specified initial capacity and the specified load factor.
*
* @apiNote
* To create a {@code HashSet} with an initial capacity that accommodates
* an expected number of elements, use {@link #newHashSet(int) newHashSet}.
*
* @param initialCapacity the initial capacity of the hash map
* @param loadFactor the load factor of the hash map
* @throws IllegalArgumentException if the initial capacity is less
@@ -138,6 +142,10 @@ public class HashSet<E>
* Constructs a new, empty set; the backing {@code HashMap} instance has
* the specified initial capacity and default load factor (0.75).
*
* @apiNote
* To create a {@code HashSet} with an initial capacity that accommodates
* an expected number of elements, use {@link #newHashSet(int) newHashSet}.
*
* @param initialCapacity the initial capacity of the hash table
* @throws IllegalArgumentException if the initial capacity is less
* than zero
@@ -372,4 +380,21 @@ public class HashSet<E>
public <T> T[] toArray(T[] a) {
return map.keysToArray(map.prepareArray(a));
}
/**
* Creates a new, empty HashSet suitable for the expected number of elements.
* The returned set uses the default load factor of 0.75, and its initial capacity is
* generally large enough so that the expected number of elements can be added
* without resizing the set.
*
* @param numElements the expected number of elements
* @param <T> the type of elements maintained by the new set
* @return the newly created set
* @throws IllegalArgumentException if numElements is negative
* @since 19
*/
public static <T> HashSet<T> newHashSet(int numElements) {
return new HashSet<>(HashMap.calculateHashMapCapacity(numElements));
}
}
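
The capacity passed to the internal constructor comes from HashMap.calculateHashMapCapacity, which is essentially ceil(numElements / 0.75); the backing table then rounds that up to a power of two when it is first allocated. A worked example of the resulting behavior (the concrete numbers are illustrations, not assertions made by the patch):

import java.util.HashSet;
import java.util.Set;

class NewHashSetSizingSketch {
    public static void main(String[] args) {
        // Expected 16 elements: requested capacity = ceil(16 / 0.75) = 22,
        // the table rounds up to 32 with a threshold of 24, so 16 adds never rehash.
        Set<Integer> sized = HashSet.newHashSet(16);
        for (int i = 0; i < 16; i++) {
            sized.add(i);
        }
        System.out.println(sized.size()); // 16

        // For comparison, new HashSet<>(16) allocates a 16-slot table with a
        // threshold of 12, so the same 16 adds would trigger one resize.
    }
}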

@@ -126,6 +126,10 @@ public class LinkedHashSet<E>
* Constructs a new, empty linked hash set with the specified initial
* capacity and load factor.
*
* @apiNote
* To create a {@code LinkedHashSet} with an initial capacity that accommodates
* an expected number of elements, use {@link #newLinkedHashSet(int) newLinkedHashSet}.
*
* @param initialCapacity the initial capacity of the linked hash set
* @param loadFactor the load factor of the linked hash set
* @throws IllegalArgumentException if the initial capacity is less
@@ -139,6 +143,10 @@ public class LinkedHashSet<E>
* Constructs a new, empty linked hash set with the specified initial
* capacity and the default load factor (0.75).
*
* @apiNote
* To create a {@code LinkedHashSet} with an initial capacity that accommodates
* an expected number of elements, use {@link #newLinkedHashSet(int) newLinkedHashSet}.
*
* @param initialCapacity the initial capacity of the LinkedHashSet
* @throws IllegalArgumentException if the initial capacity is less
* than zero
@@ -166,7 +174,7 @@ public class LinkedHashSet<E>
* @throws NullPointerException if the specified collection is null
*/
public LinkedHashSet(Collection<? extends E> c) {
super(Math.max(2*c.size(), 11), .75f, true);
super(HashMap.calculateHashMapCapacity(Math.max(c.size(), 12)), .75f, true);
addAll(c);
}
@@ -193,4 +201,21 @@ public class LinkedHashSet<E>
public Spliterator<E> spliterator() {
return Spliterators.spliterator(this, Spliterator.DISTINCT | Spliterator.ORDERED);
}
/**
* Creates a new, empty LinkedHashSet suitable for the expected number of elements.
* The returned set uses the default load factor of 0.75, and its initial capacity is
* generally large enough so that the expected number of elements can be added
* without resizing the set.
*
* @param numElements the expected number of elements
* @param <T> the type of elements maintained by the new set
* @return the newly created set
* @throws IllegalArgumentException if numElements is negative
* @since 19
*/
public static <T> LinkedHashSet<T> newLinkedHashSet(int numElements) {
return new LinkedHashSet<>(HashMap.calculateHashMapCapacity(numElements));
}
}
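
newLinkedHashSet gives the same capacity guarantee while preserving insertion order. The copy constructor above also moves from the ad-hoc Math.max(2*c.size(), 11) sizing to the shared calculateHashMapCapacity computation; the Math.max(c.size(), 12) floor keeps the minimum table size at 16, as before. A brief usage sketch (names are illustrative):

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class NewLinkedHashSetUsageSketch {
    public static void main(String[] args) {
        List<String> headerNames = List.of("host", "accept", "user-agent");

        // Pre-sized for the expected element count; iteration follows
        // insertion order: host, accept, user-agent.
        Set<String> ordered = LinkedHashSet.newLinkedHashSet(headerNames.size());
        ordered.addAll(headerNames);
        System.out.println(ordered);
    }
}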

@@ -446,7 +446,7 @@ public final class ModuleInfo {
int exports_to_count = in.readUnsignedShort();
if (exports_to_count > 0) {
Set<String> targets = new HashSet<>(exports_to_count);
Set<String> targets = HashSet.newHashSet(exports_to_count);
for (int j=0; j<exports_to_count; j++) {
int exports_to_index = in.readUnsignedShort();
String target = cpool.getModuleName(exports_to_index);
@@ -486,7 +486,7 @@ public final class ModuleInfo {
int open_to_count = in.readUnsignedShort();
if (open_to_count > 0) {
Set<String> targets = new HashSet<>(open_to_count);
Set<String> targets = HashSet.newHashSet(open_to_count);
for (int j=0; j<open_to_count; j++) {
int opens_to_index = in.readUnsignedShort();
String target = cpool.getModuleName(opens_to_index);
@@ -540,7 +540,7 @@ public final class ModuleInfo {
throws IOException
{
int package_count = in.readUnsignedShort();
Set<String> packages = new HashSet<>(package_count);
Set<String> packages = HashSet.newHashSet(package_count);
for (int i=0; i<package_count; i++) {
int index = in.readUnsignedShort();
String pn = cpool.getPackageName(index);

@@ -278,7 +278,7 @@ public class HttpURLConnection extends java.net.HttpURLConnection {
allowRestrictedHeaders = Boolean.parseBoolean(
props.getProperty("sun.net.http.allowRestrictedHeaders"));
if (!allowRestrictedHeaders) {
restrictedHeaderSet = new HashSet<>(restrictedHeaders.length);
restrictedHeaderSet = HashSet.newHashSet(restrictedHeaders.length);
for (int i=0; i < restrictedHeaders.length; i++) {
restrictedHeaderSet.add(restrictedHeaders[i].toLowerCase());
}

@@ -230,7 +230,7 @@ abstract class AsynchronousServerSocketChannelImpl
static final Set<SocketOption<?>> defaultOptions = defaultOptions();
private static Set<SocketOption<?>> defaultOptions() {
HashSet<SocketOption<?>> set = new HashSet<>(2);
HashSet<SocketOption<?>> set = HashSet.newHashSet(2);
set.add(StandardSocketOptions.SO_RCVBUF);
set.add(StandardSocketOptions.SO_REUSEADDR);
if (Net.isReusePortAvailable()) {

@@ -503,7 +503,7 @@ abstract class AsynchronousSocketChannelImpl
static final Set<SocketOption<?>> defaultOptions = defaultOptions();
private static Set<SocketOption<?>> defaultOptions() {
HashSet<SocketOption<?>> set = new HashSet<>(5);
HashSet<SocketOption<?>> set = HashSet.newHashSet(5);
set.add(StandardSocketOptions.SO_SNDBUF);
set.add(StandardSocketOptions.SO_RCVBUF);
set.add(StandardSocketOptions.SO_KEEPALIVE);

@@ -105,7 +105,7 @@ abstract class AbstractPoller implements Runnable {
// validate arguments before request to poller
if (dir == null)
throw new NullPointerException();
Set<WatchEvent.Kind<?>> eventSet = new HashSet<>(events.length);
Set<WatchEvent.Kind<?>> eventSet = HashSet.newHashSet(events.length);
for (WatchEvent.Kind<?> event: events) {
// standard events
if (event == StandardWatchEventKinds.ENTRY_CREATE ||

@@ -95,7 +95,7 @@ class PollingWatchService
throws IOException
{
// check events - CCE will be thrown if there are invalid elements
final Set<WatchEvent.Kind<?>> eventSet = new HashSet<>(events.length);
final Set<WatchEvent.Kind<?>> eventSet = HashSet.newHashSet(events.length);
for (WatchEvent.Kind<?> event: events) {
// standard events
if (event == StandardWatchEventKinds.ENTRY_CREATE ||

@@ -514,7 +514,7 @@ public class PKCS7 {
// CRLs (optional)
if (crls != null && crls.length != 0) {
// cast to X509CRLImpl[] since X509CRLImpl implements DerEncoder
Set<X509CRLImpl> implCRLs = new HashSet<>(crls.length);
Set<X509CRLImpl> implCRLs = HashSet.newHashSet(crls.length);
for (X509CRL crl: crls) {
if (crl instanceof X509CRLImpl)
implCRLs.add((X509CRLImpl) crl);

@@ -92,7 +92,7 @@ class ConstraintsChecker extends PKIXCertPathChecker {
@Override
public Set<String> getSupportedExtensions() {
if (supportedExts == null) {
supportedExts = new HashSet<String>(2);
supportedExts = HashSet.newHashSet(2);
supportedExts.add(BasicConstraints_Id.toString());
supportedExts.add(NameConstraints_Id.toString());
supportedExts = Collections.unmodifiableSet(supportedExts);

@@ -81,8 +81,8 @@ final class ForwardBuilder extends Builder {
// populate sets of trusted certificates and subject DNs
trustAnchors = buildParams.trustAnchors();
trustedCerts = new HashSet<X509Certificate>(trustAnchors.size());
trustedSubjectDNs = new HashSet<X500Principal>(trustAnchors.size());
trustedCerts = HashSet.newHashSet(trustAnchors.size());
trustedSubjectDNs = HashSet.newHashSet(trustAnchors.size());
for (TrustAnchor anchor : trustAnchors) {
X509Certificate trustedCert = anchor.getTrustedCert();
if (trustedCert != null) {

@@ -85,7 +85,7 @@ class KeyChecker extends PKIXCertPathChecker {
@Override
public Set<String> getSupportedExtensions() {
if (supportedExts == null) {
supportedExts = new HashSet<String>(3);
supportedExts = HashSet.newHashSet(3);
supportedExts.add(KeyUsage_Id.toString());
supportedExts.add(ExtendedKeyUsage_Id.toString());
supportedExts.add(SubjectAlternativeName_Id.toString());

@@ -93,7 +93,7 @@ class PolicyChecker extends PKIXCertPathChecker {
if (initialPolicies.isEmpty()) {
// if no initialPolicies are specified by user, set
// initPolicies to be anyPolicy by default
this.initPolicies = new HashSet<String>(1);
this.initPolicies = HashSet.newHashSet(1);
this.initPolicies.add(ANY_POLICY);
} else {
this.initPolicies = new HashSet<String>(initialPolicies);
@@ -154,7 +154,7 @@ class PolicyChecker extends PKIXCertPathChecker {
@Override
public Set<String> getSupportedExtensions() {
if (supportedExts == null) {
supportedExts = new HashSet<String>(4);
supportedExts = HashSet.newHashSet(4);
supportedExts.add(CertificatePolicies_Id.toString());
supportedExts.add(PolicyMappings_Id.toString());
supportedExts.add(PolicyConstraints_Id.toString());

@@ -573,7 +573,7 @@ final class CertificateMessage {
if ((subAltDnsName != null) && !subAltDnsName.isEmpty()) {
if (subAltDnsNames == null) {
subAltDnsNames =
new HashSet<>(subjectAltNames.size());
HashSet.newHashSet(subjectAltNames.size());
}
subAltDnsNames.add(subAltDnsName);
}

@@ -110,7 +110,7 @@ final class SunX509KeyManagerImpl extends X509ExtendedKeyManager {
// assert privateKey and certificates != null
this.privateKey = privateKey;
this.certificates = certificates;
this.issuerX500Principals = new HashSet<>(certificates.length);
this.issuerX500Principals = HashSet.newHashSet(certificates.length);
for (X509Certificate certificate : certificates) {
issuerX500Principals.add(certificate.getIssuerX500Principal());
}

@@ -313,7 +313,7 @@ public final class InternalLocaleBuilder {
clearExtensions();
if (!LocaleUtils.isEmpty(bcpExtensions)) {
Set<CaseInsensitiveChar> done = new HashSet<>(bcpExtensions.size());
Set<CaseInsensitiveChar> done = HashSet.newHashSet(bcpExtensions.size());
for (String bcpExt : bcpExtensions) {
CaseInsensitiveChar key = new CaseInsensitiveChar(bcpExt);
// ignore duplicates

@@ -148,7 +148,7 @@ public class OpeningHandshake {
private static Collection<String> createRequestSubprotocols(
Collection<String> subprotocols)
{
LinkedHashSet<String> sp = new LinkedHashSet<>(subprotocols.size(), 1);
LinkedHashSet<String> sp = LinkedHashSet.newLinkedHashSet(subprotocols.size());
for (String s : subprotocols) {
if (s.trim().isEmpty() || !isValidName(s)) {
throw illegal("Bad subprotocol syntax: " + s);

@@ -384,7 +384,7 @@ public class TCPEndpoint implements Endpoint {
Set<TCPTransport> s;
synchronized (localEndpoints) {
// presize s to number of localEndpoints
s = new HashSet<TCPTransport>(localEndpoints.size());
s = HashSet.newHashSet(localEndpoints.size());
for (LinkedList<TCPEndpoint> epList : localEndpoints.values()) {
/*
* Each local endpoint has its transport added to s.

@@ -3309,7 +3309,7 @@ class ZipFileSystem extends FileSystem {
public Optional<Set<PosixFilePermission>> storedPermissions() {
Set<PosixFilePermission> perms = null;
if (posixPerms != -1) {
perms = new HashSet<>(PosixFilePermission.values().length);
perms = HashSet.newHashSet(PosixFilePermission.values().length);
for (PosixFilePermission perm : PosixFilePermission.values()) {
if ((posixPerms & ZipUtils.permToFlag(perm)) != 0) {
perms.add(perm);

@@ -81,7 +81,7 @@ public class UnreferencedRAFClosesFd {
// Prepare to wait for FOS, FD, Cleanup to be reclaimed
ReferenceQueue<Object> queue = new ReferenceQueue<>();
HashSet<Reference<?>> pending = new HashSet<>(3);
HashSet<Reference<?>> pending = HashSet.newHashSet(3);
pending.add(new WeakReference<>(cleanup, queue));
pending.add(new WeakReference<>(raf, queue));
pending.add(new WeakReference<>(fd, queue));

@@ -57,7 +57,7 @@ public class TestFileEncoding {
private String expectedEncoding; // Expected value for file.encoding
private String langVar = null; // Value to set for LANG, etc
private static Set<String> envToRm = new HashSet<>(3);
private static Set<String> envToRm = HashSet.newHashSet(3);
static {
// Take these vars out of the test's run environment, possibly adding
// our own value back in.

@@ -434,7 +434,7 @@ public class FormatIteratorTest extends IntlTest {
}
public Set<Attribute> makeAttributes(List<Object> names) {
Set<Attribute> set = new HashSet<>(Math.max(1, names.size()));
Set<Attribute> set = HashSet.newHashSet(names.size());
for (int counter = 0; counter < names.size(); counter++) {
set.add(makeAttribute((String)names.get(counter)));

@@ -29,13 +29,16 @@ import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.invoke.VarHandle;
import java.lang.reflect.Field;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -48,7 +51,7 @@ import static org.testng.Assert.assertNull;
/*
* @test
* @bug 8186958 8210280 8281631 8285386
* @bug 8186958 8210280 8281631 8285386 8284780
* @modules java.base/java.util:open
* @summary White box tests for HashMap-related internals around table sizing
* @run testng/othervm -Xmx2g WhiteBoxResizeTest
@@ -58,6 +61,7 @@ public class WhiteBoxResizeTest {
final MethodHandle TABLE_SIZE_FOR;
final VarHandle HM_TABLE;
final VarHandle WHM_TABLE;
final VarHandle HS_MAP;
public WhiteBoxResizeTest() throws ReflectiveOperationException {
MethodHandles.Lookup hmlookup = MethodHandles.privateLookupIn(HashMap.class, MethodHandles.lookup());
@@ -67,6 +71,9 @@ public class WhiteBoxResizeTest {
MethodHandles.Lookup whmlookup = MethodHandles.privateLookupIn(WeakHashMap.class, MethodHandles.lookup());
WHM_TABLE = whmlookup.unreflectVarHandle(WeakHashMap.class.getDeclaredField("table"));
MethodHandles.Lookup hslookup = MethodHandles.privateLookupIn(HashSet.class, MethodHandles.lookup());
HS_MAP = hslookup.unreflectVarHandle(HashSet.class.getDeclaredField("map"));
}
/*
@@ -328,15 +335,17 @@ public class WhiteBoxResizeTest {
Object[] rsc(String label,
int size,
int expectedCapacity,
Supplier<Map<String, String>> supplier) {
Supplier<Capacitiable> supplier) {
return new Object[]{label, size, expectedCapacity, supplier};
}
List<Object[]> genRequestedSizeCases(int size, int cap) {
return Arrays.asList(
rsc("rshm", size, cap, () -> HashMap.newHashMap(size)),
rsc("rslm", size, cap, () -> LinkedHashMap.newLinkedHashMap(size)),
rsc("rswm", size, cap, () -> WeakHashMap.newWeakHashMap(size))
rsc("rshm", size, cap, () -> new MapCapacitiable(HashMap.newHashMap(size))),
rsc("rslm", size, cap, () -> new MapCapacitiable(LinkedHashMap.newLinkedHashMap(size))),
rsc("rswm", size, cap, () -> new MapCapacitiable(WeakHashMap.newWeakHashMap(size))),
rsc("rshs", size, cap, () -> new SetCapacitiable(HashSet.newHashSet(size))),
rsc("rsls", size, cap, () -> new SetCapacitiable(LinkedHashSet.newLinkedHashSet(size)))
);
}
@@ -364,9 +373,57 @@ public class WhiteBoxResizeTest {
public void requestedSize(String label, // unused, included for diagnostics
int size, // unused, included for diagnostics
int expectedCapacity,
Supplier<Map<String, String>> s) {
Map<String, String> map = s.get();
map.put("", "");
assertEquals(capacity(map), expectedCapacity);
Supplier<Capacitiable> s) {
Capacitiable capacitiable = s.get();
capacitiable.init();
assertEquals(capacitiable.capacity(), expectedCapacity);
}
interface Capacitiable {
void init();
int capacity();
}
class MapCapacitiable implements Capacitiable {
private final Map<String, String> content;
public MapCapacitiable(Map<String, String> content) {
this.content = content;
}
@Override
public void init() {
content.put("", "");
}
@Override
public int capacity() {
return table(content).length;
}
}
class SetCapacitiable implements Capacitiable {
private final Set<String> content;
public SetCapacitiable(Set<String> content) {
this.content = content;
}
@Override
public void init() {
content.add("");
}
@Override
public int capacity() {
HashMap<?, ?> hashMap = (HashMap<?, ?>) HS_MAP.get(content);
return table(hashMap).length;
}
}
}
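
In both wrappers the init() call is what makes capacity() meaningful: HashMap allocates its table lazily, so the private table field stays null until the first insertion. A minimal sketch of that behavior using plain reflection instead of the test's VarHandle plumbing (it needs the same java.base/java.util opens the test declares):

import java.lang.reflect.Field;
import java.util.HashMap;

class LazyTableSketch {
    public static void main(String[] args) throws Exception {
        Field table = HashMap.class.getDeclaredField("table");
        table.setAccessible(true); // requires --add-opens java.base/java.util=ALL-UNNAMED

        HashMap<String, String> map = HashMap.newHashMap(12);
        System.out.println(table.get(map));                      // null: not allocated yet
        map.put("", "");
        System.out.println(((Object[]) table.get(map)).length);  // 16 = ceil(12 / 0.75) rounded up to a power of two
    }
}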