8023463: Improvements to HashMap/LinkedHashMap use of bins/buckets and trees (red/black)

8012913: LinkedHashMap key/value/entry spliterators should report ORDERED

Co-authored-by: Doug Lea <dl@cs.oswego.edu>
Reviewed-by: mduigou, forax, bchristi, alanb
This commit is contained in:
Paul Sandoz 2013-09-04 09:34:25 +02:00
parent 1f2ba9f228
commit 0fb014c2b3
8 changed files with 2429 additions and 2945 deletions

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2012, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -24,9 +24,12 @@
*/
package java.util;
import java.io.*;
import java.util.function.Consumer;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.io.Serializable;
import java.io.IOException;
/**
* <p>Hash table and linked list implementation of the <tt>Map</tt> interface,
@ -57,9 +60,9 @@ import java.util.function.BiFunction;
* order they were presented.)
*
* <p>A special {@link #LinkedHashMap(int,float,boolean) constructor} is
* provided to create a <tt>LinkedHashMap</tt> whose order of iteration is the
* order in which its entries were last accessed, from least-recently accessed
* to most-recently (<i>access-order</i>). This kind of map is well-suited to
* provided to create a linked hash map whose order of iteration is the order
* in which its entries were last accessed, from least-recently accessed to
* most-recently (<i>access-order</i>). This kind of map is well-suited to
* building LRU caches. Invoking the <tt>put</tt> or <tt>get</tt> method
* results in an access to the corresponding entry (assuming it exists after
* the invocation completes). The <tt>putAll</tt> method generates one entry
@ -155,18 +158,53 @@ import java.util.function.BiFunction;
* @see Hashtable
* @since 1.4
*/
public class LinkedHashMap<K,V>
extends HashMap<K,V>
implements Map<K,V>
{
/*
* Implementation note. A previous version of this class was
* internally structured a little differently. Because superclass
* HashMap now uses trees for some of its nodes, class
* LinkedHashMap.Entry is now treated as intermediary node class
* that can also be converted to tree form. The name of this
* class, LinkedHashMap.Entry, is confusing in several ways in its
* current context, but cannot be changed. Otherwise, even though
* it is not exported outside this package, some existing source
* code is known to have relied on a symbol resolution corner case
* rule in calls to removeEldestEntry that suppressed compilation
* errors due to ambiguous usages. So, we keep the name to
* preserve unmodified compilability.
*
* The changes in node classes also require using two fields
* (head, tail) rather than a pointer to a header node to maintain
* the doubly-linked before/after list. This class also
* previously used a different style of callback methods upon
* access, insertion, and removal.
*/
/**
 * HashMap.Node subclass for normal LinkedHashMap entries.
 */
static class Entry<K,V> extends HashMap.Node<K,V> {
    // before/after form the doubly-linked list maintaining iteration order.
    Entry<K,V> before, after;
    Entry(int hash, K key, V value, Node<K,V> next) {
        super(hash, key, value, next);
    }
}
private static final long serialVersionUID = 3801124242820219131L;
/**
* The head of the doubly linked list.
* The head (eldest) of the doubly linked list.
*/
private transient Entry<K,V> header;
transient LinkedHashMap.Entry<K,V> head;
/**
* The tail (youngest) of the doubly linked list.
*/
transient LinkedHashMap.Entry<K,V> tail;
/**
* The iteration ordering method for this linked hash map: <tt>true</tt>
@ -174,7 +212,125 @@ public class LinkedHashMap<K,V>
*
* @serial
*/
private final boolean accessOrder;
final boolean accessOrder;
// internal utilities
// Appends p at the tail of the doubly-linked list (youngest position).
private void linkNodeLast(LinkedHashMap.Entry<K,V> p) {
    LinkedHashMap.Entry<K,V> prevTail = tail;
    tail = p;
    if (prevTail != null) {
        p.before = prevTail;
        prevTail.after = p;
    } else {
        head = p;  // list was empty; p is now both head and tail
    }
}
// Makes dst take over src's position in the doubly-linked list.
private void transferLinks(LinkedHashMap.Entry<K,V> src,
                           LinkedHashMap.Entry<K,V> dst) {
    LinkedHashMap.Entry<K,V> pred = dst.before = src.before;
    LinkedHashMap.Entry<K,V> succ = dst.after = src.after;
    if (pred != null)
        pred.after = dst;
    else
        head = dst;   // src was the head
    if (succ != null)
        succ.before = dst;
    else
        tail = dst;   // src was the tail
}
// overrides of HashMap hook methods
// Resets superclass state and clears the linked-list endpoints.
// NOTE(review): presumably invoked by HashMap's clone/readObject
// pseudo-constructors — confirm against HashMap.reinitialize().
void reinitialize() {
    super.reinitialize();
    head = tail = null;
}
// Creates a plain (non-tree) entry and appends it to the linked list.
Node<K,V> newNode(int hash, K key, V value, Node<K,V> e) {
    LinkedHashMap.Entry<K,V> entry =
        new LinkedHashMap.Entry<K,V>(hash, key, value, e);
    linkNodeLast(entry);
    return entry;
}
// Builds a plain entry replacing p (e.g. on untreeify), inheriting p's
// position in the linked list.
Node<K,V> replacementNode(Node<K,V> p, Node<K,V> next) {
    LinkedHashMap.Entry<K,V> old = (LinkedHashMap.Entry<K,V>)p;
    LinkedHashMap.Entry<K,V> replacement =
        new LinkedHashMap.Entry<K,V>(old.hash, old.key, old.value, next);
    transferLinks(old, replacement);
    return replacement;
}
// Creates a tree entry and appends it to the linked list.
TreeNode<K,V> newTreeNode(int hash, K key, V value, Node<K,V> next) {
    TreeNode<K,V> treeNode = new TreeNode<K,V>(hash, key, value, next);
    linkNodeLast(treeNode);
    return treeNode;
}
// Builds a tree entry replacing p (e.g. on treeify), inheriting p's
// position in the linked list.
TreeNode<K,V> replacementTreeNode(Node<K,V> p, Node<K,V> next) {
    LinkedHashMap.Entry<K,V> old = (LinkedHashMap.Entry<K,V>)p;
    TreeNode<K,V> replacement =
        new TreeNode<K,V>(old.hash, old.key, old.value, next);
    transferLinks(old, replacement);
    return replacement;
}
void afterNodeRemoval(Node<K,V> e) { // unlink
    // Splices the removed node out of the doubly-linked list, updating
    // head/tail when the node was first/last.
    LinkedHashMap.Entry<K,V> p =
        (LinkedHashMap.Entry<K,V>)e, b = p.before, a = p.after;
    p.before = p.after = null;  // clear stale links on the removed node
    if (b == null)
        head = a;       // p was the head
    else
        b.after = a;
    if (a == null)
        tail = b;       // p was the tail
    else
        a.before = b;
}
// After an insertion: evict the eldest entry (list head) when eviction is
// enabled and removeEldestEntry approves.
void afterNodeInsertion(boolean evict) { // possibly remove eldest
    LinkedHashMap.Entry<K,V> eldest = head;
    if (evict && eldest != null && removeEldestEntry(eldest)) {
        K key = eldest.key;
        removeNode(hash(key), key, null, false, true);
    }
}
void afterNodeAccess(Node<K,V> e) { // move node to last
    // In access-order mode, moves the accessed node to the tail
    // (most-recently-accessed position). No-op when insertion-ordered or
    // the node is already last.
    LinkedHashMap.Entry<K,V> last;
    if (accessOrder && (last = tail) != e) {
        LinkedHashMap.Entry<K,V> p =
            (LinkedHashMap.Entry<K,V>)e, b = p.before, a = p.after;
        p.after = null;
        // Unlink p from its current position.
        if (b == null)
            head = a;
        else
            b.after = a;
        if (a != null)
            a.before = b;
        else
            last = b;   // p was last; its predecessor becomes the tail anchor
        // Relink p at the tail.
        if (last == null)
            head = p;   // list became empty apart from p
        else {
            p.before = last;
            last.after = p;
        }
        tail = p;
        ++modCount;  // reordering counts as a structural modification
    }
}
// Writes key/value pairs to the stream in linked-list (iteration) order.
// NOTE(review): presumably the serialization hook called from HashMap's
// writeObject — confirm against HashMap.internalWriteEntries.
void internalWriteEntries(java.io.ObjectOutputStream s) throws IOException {
    for (LinkedHashMap.Entry<K,V> e = head; e != null; e = e.after) {
        s.writeObject(e.key);
        s.writeObject(e.value);
    }
}
/**
* Constructs an empty insertion-ordered <tt>LinkedHashMap</tt> instance
@ -221,8 +377,9 @@ public class LinkedHashMap<K,V>
* @throws NullPointerException if the specified map is null
*/
public LinkedHashMap(Map<? extends K, ? extends V> m) {
super(m);
super();
accessOrder = false;
putMapEntries(m, false);
}
/**
@ -243,16 +400,6 @@ public class LinkedHashMap<K,V>
this.accessOrder = accessOrder;
}
/**
* Called by superclass constructors and pseudoconstructors (clone,
* readObject) before any entries are inserted into the map. Initializes
* the chain.
*/
@Override
void init() {
header = new Entry<>(-1, null, null, null);
header.before = header.after = header;
}
/**
* Returns <tt>true</tt> if this map maps one or more keys to the
@ -263,15 +410,10 @@ public class LinkedHashMap<K,V>
* specified value
*/
public boolean containsValue(Object value) {
// Overridden to take advantage of faster iterator
if (value==null) {
for (Entry<?,?> e = header.after; e != header; e = e.after)
if (e.value==null)
return true;
} else {
for (Entry<?,?> e = header.after; e != header; e = e.after)
if (value.equals(e.value))
return true;
for (LinkedHashMap.Entry<K,V> e = head; e != null; e = e.after) {
V v = e.value;
if (v == value || (value != null && value.equals(v)))
return true;
}
return false;
}
@ -292,10 +434,11 @@ public class LinkedHashMap<K,V>
* distinguish these two cases.
*/
public V get(Object key) {
Entry<K,V> e = (Entry<K,V>)getEntry(key);
if (e == null)
Node<K,V> e;
if ((e = getNode(hash(key), key)) == null)
return null;
e.recordAccess(this);
if (accessOrder)
afterNodeAccess(e);
return e.value;
}
@ -305,163 +448,7 @@ public class LinkedHashMap<K,V>
*/
public void clear() {
super.clear();
header.before = header.after = header;
}
@Override
public void forEach(BiConsumer<? super K, ? super V> action) {
    Objects.requireNonNull(action);
    int expectedModCount = modCount;
    // header is a sentinel node: iteration follows .after links and stops
    // when it cycles back to the sentinel.
    for (Entry<K, V> entry = header.after; entry != header; entry = entry.after) {
        action.accept(entry.key, entry.value);
        // Fail fast if the action structurally modified the map.
        if (expectedModCount != modCount) {
            throw new ConcurrentModificationException();
        }
    }
}
@Override
public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
    Objects.requireNonNull(function);
    int expectedModCount = modCount;
    // Walks the sentinel-terminated linked list, replacing each value with
    // the function's result; value replacement does not bump modCount.
    for (Entry<K, V> entry = header.after; entry != header; entry = entry.after) {
        entry.value = function.apply(entry.key, entry.value);
        // Fail fast if the function structurally modified the map.
        if (expectedModCount != modCount) {
            throw new ConcurrentModificationException();
        }
    }
}
/**
 * LinkedHashMap entry: a HashMap.Entry extended with before/after links
 * that thread entries into a doubly-linked list.
 */
private static class Entry<K,V> extends HashMap.Entry<K,V> {
    // These fields comprise the doubly linked list used for iteration.
    Entry<K,V> before, after;
    Entry(int hash, K key, V value, Object next) {
        super(hash, key, value, next);
    }
    /**
     * Removes this entry from the linked list.
     * Relies on the sentinel header so before/after are never null.
     */
    private void remove() {
        before.after = after;
        after.before = before;
    }
    /**
     * Inserts this entry before the specified existing entry in the list.
     */
    private void addBefore(Entry<K,V> existingEntry) {
        after = existingEntry;
        before = existingEntry.before;
        before.after = this;
        after.before = this;
    }
    /**
     * This method is invoked by the superclass whenever the value
     * of a pre-existing entry is read by Map.get or modified by Map.put.
     * If the enclosing Map is access-ordered, it moves the entry
     * to the end of the list; otherwise, it does nothing.
     */
    void recordAccess(HashMap<K,V> m) {
        LinkedHashMap<K,V> lm = (LinkedHashMap<K,V>)m;
        if (lm.accessOrder) {
            lm.modCount++;  // reordering is a structural modification
            remove();
            addBefore(lm.header);  // header.before is the youngest position
        }
    }
    // Invoked by the superclass when this entry is removed from the map;
    // unlinks it from the iteration list.
    void recordRemoval(HashMap<K,V> m) {
        remove();
    }
}
// Common fail-fast machinery for the key/value/entry iterators; subclasses
// supply next() in terms of nextEntry().
private abstract class LinkedHashIterator<T> implements Iterator<T> {
    // header is a sentinel, so header.after is the eldest entry.
    Entry<K,V> nextEntry = header.after;
    Entry<K,V> lastReturned = null;
    /**
     * The modCount value that the iterator believes that the backing
     * List should have. If this expectation is violated, the iterator
     * has detected concurrent modification.
     */
    int expectedModCount = modCount;
    public boolean hasNext() {
        return nextEntry != header;  // back at the sentinel == exhausted
    }
    public void remove() {
        if (lastReturned == null)
            throw new IllegalStateException();
        if (modCount != expectedModCount)
            throw new ConcurrentModificationException();
        LinkedHashMap.this.remove(lastReturned.key);
        lastReturned = null;
        expectedModCount = modCount;  // our own removal is not a violation
    }
    // Advances to the next entry, failing fast on concurrent modification
    // or exhaustion.
    Entry<K,V> nextEntry() {
        if (modCount != expectedModCount)
            throw new ConcurrentModificationException();
        if (nextEntry == header)
            throw new NoSuchElementException();
        Entry<K,V> e = lastReturned = nextEntry;
        nextEntry = e.after;
        return e;
    }
}
// Iterator over keys, in linked-list order.
private class KeyIterator extends LinkedHashIterator<K> {
    public K next() { return nextEntry().getKey(); }
}
// Iterator over values, in linked-list order.
private class ValueIterator extends LinkedHashIterator<V> {
    public V next() { return nextEntry().value; }
}
// Iterator over entries, in linked-list order.
private class EntryIterator extends LinkedHashIterator<Map.Entry<K,V>> {
    public Map.Entry<K,V> next() { return nextEntry(); }
}
// These Overrides alter the behavior of superclass view iterator() methods
// so that keySet/values/entrySet views traverse the linked list rather than
// the hash buckets.
Iterator<K> newKeyIterator() { return new KeyIterator(); }
Iterator<V> newValueIterator() { return new ValueIterator(); }
Iterator<Map.Entry<K,V>> newEntryIterator() { return new EntryIterator(); }
/**
* This override alters behavior of superclass put method. It causes newly
* allocated entry to get inserted at the end of the linked list and
* removes the eldest entry if appropriate.
*/
@Override
void addEntry(int hash, K key, V value, int bucketIndex, boolean checkIfNeedTree) {
    super.addEntry(hash, key, value, bucketIndex, checkIfNeedTree);
    // Remove eldest entry if instructed
    Entry<K,V> eldest = header.after;  // header.after is the eldest entry
    if (removeEldestEntry(eldest)) {
        removeEntryForKey(eldest.key);
    }
}
/*
* Create a new LinkedHashMap.Entry and setup the before/after pointers
*/
@Override
HashMap.Entry<K,V> newEntry(int hash, K key, V value, Object next) {
Entry<K,V> newEntry = new Entry<>(hash, key, value, next);
newEntry.addBefore(header);
return newEntry;
head = tail = null;
}
/**
@ -475,13 +462,13 @@ public class LinkedHashMap<K,V>
* <p>Sample use: this override will allow the map to grow up to 100
* entries and then delete the eldest entry each time a new entry is
* added, maintaining a steady state of 100 entries.
* <pre>{@code
* <pre>
* private static final int MAX_ENTRIES = 100;
*
* protected boolean removeEldestEntry(Map.Entry eldest) {
* return size() > MAX_ENTRIES;
* return size() &gt; MAX_ENTRIES;
* }
* }</pre>
* </pre>
*
* <p>This method typically does not modify the map in any way,
* instead allowing the map to modify itself as directed by its
@ -508,4 +495,241 @@ public class LinkedHashMap<K,V>
protected boolean removeEldestEntry(Map.Entry<K,V> eldest) {
    // Default policy: never evict. Subclasses override to implement
    // cache-style eviction (see the method javadoc above).
    return false;
}
/**
* Returns a {@link Set} view of the keys contained in this map.
* The set is backed by the map, so changes to the map are
* reflected in the set, and vice-versa. If the map is modified
* while an iteration over the set is in progress (except through
* the iterator's own <tt>remove</tt> operation), the results of
* the iteration are undefined. The set supports element removal,
* which removes the corresponding mapping from the map, via the
* <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
* <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt>
* operations. It does not support the <tt>add</tt> or <tt>addAll</tt>
* operations.
* Its {@link Spliterator} typically provides faster sequential
* performance but much poorer parallel performance than that of
* {@code HashMap}.
*
* @return a set view of the keys contained in this map
*/
public Set<K> keySet() {
    Set<K> ks = keySet;
    if (ks == null) {
        ks = new LinkedKeySet();
        keySet = ks;  // lazily create and cache the view
    }
    return ks;
}
// Key-set view backed by the map; iterates in linked-list order.
final class LinkedKeySet extends AbstractSet<K> {
    public final int size()                 { return size; }
    public final void clear()               { LinkedHashMap.this.clear(); }
    public final Iterator<K> iterator()     { return new LinkedKeyIterator(); }
    public final boolean contains(Object o) { return containsKey(o); }
    public final boolean remove(Object key) {
        return removeNode(hash(key), key, null, false, true) != null;
    }
    public final Spliterator<K> spliterator() {
        int characteristics =
            Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT;
        return Spliterators.spliterator(this, characteristics);
    }
    public final void forEach(Consumer<? super K> action) {
        Objects.requireNonNull(action);
        int expectedModCount = modCount;
        for (LinkedHashMap.Entry<K,V> e = head; e != null; e = e.after)
            action.accept(e.key);
        if (modCount != expectedModCount)
            throw new ConcurrentModificationException();
    }
}
/**
* Returns a {@link Collection} view of the values contained in this map.
* The collection is backed by the map, so changes to the map are
* reflected in the collection, and vice-versa. If the map is
* modified while an iteration over the collection is in progress
* (except through the iterator's own <tt>remove</tt> operation),
* the results of the iteration are undefined. The collection
* supports element removal, which removes the corresponding
* mapping from the map, via the <tt>Iterator.remove</tt>,
* <tt>Collection.remove</tt>, <tt>removeAll</tt>,
* <tt>retainAll</tt> and <tt>clear</tt> operations. It does not
* support the <tt>add</tt> or <tt>addAll</tt> operations.
* Its {@link Spliterator} typically provides faster sequential
* performance but much poorer parallel performance than that of
* {@code HashMap}.
*
* @return a view of the values contained in this map
*/
public Collection<V> values() {
    Collection<V> vs = values;
    if (vs == null) {
        vs = new LinkedValues();
        values = vs;  // lazily create and cache the view
    }
    return vs;
}
// Values view backed by the map; iterates in linked-list order.
final class LinkedValues extends AbstractCollection<V> {
    public final int size()                 { return size; }
    public final void clear()               { LinkedHashMap.this.clear(); }
    public final Iterator<V> iterator()     { return new LinkedValueIterator(); }
    public final boolean contains(Object o) { return containsValue(o); }
    public final Spliterator<V> spliterator() {
        int characteristics = Spliterator.SIZED | Spliterator.ORDERED;
        return Spliterators.spliterator(this, characteristics);
    }
    public final void forEach(Consumer<? super V> action) {
        Objects.requireNonNull(action);
        int expectedModCount = modCount;
        for (LinkedHashMap.Entry<K,V> e = head; e != null; e = e.after)
            action.accept(e.value);
        if (modCount != expectedModCount)
            throw new ConcurrentModificationException();
    }
}
/**
* Returns a {@link Set} view of the mappings contained in this map.
* The set is backed by the map, so changes to the map are
* reflected in the set, and vice-versa. If the map is modified
* while an iteration over the set is in progress (except through
* the iterator's own <tt>remove</tt> operation, or through the
* <tt>setValue</tt> operation on a map entry returned by the
* iterator) the results of the iteration are undefined. The set
* supports element removal, which removes the corresponding
* mapping from the map, via the <tt>Iterator.remove</tt>,
* <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt> and
* <tt>clear</tt> operations. It does not support the
* <tt>add</tt> or <tt>addAll</tt> operations.
* Its {@link Spliterator} typically provides faster sequential
* performance but much poorer parallel performance than that of
* {@code HashMap}.
*
* @return a set view of the mappings contained in this map
*/
public Set<Map.Entry<K,V>> entrySet() {
    Set<Map.Entry<K,V>> es = entrySet;
    if (es == null) {
        es = new LinkedEntrySet();
        entrySet = es;  // lazily create and cache the view
    }
    return es;
}
// Entry-set view backed by the map; iterates in linked-list order.
final class LinkedEntrySet extends AbstractSet<Map.Entry<K,V>> {
    public final int size()   { return size; }
    public final void clear() { LinkedHashMap.this.clear(); }
    public final Iterator<Map.Entry<K,V>> iterator() {
        return new LinkedEntryIterator();
    }
    public final boolean contains(Object o) {
        if (!(o instanceof Map.Entry))
            return false;
        Map.Entry<?,?> e = (Map.Entry<?,?>) o;
        Object key = e.getKey();
        Node<K,V> candidate = getNode(hash(key), key);
        return candidate != null && candidate.equals(e);
    }
    public final boolean remove(Object o) {
        if (!(o instanceof Map.Entry))
            return false;
        Map.Entry<?,?> e = (Map.Entry<?,?>) o;
        Object key = e.getKey();
        Object value = e.getValue();
        return removeNode(hash(key), key, value, true, true) != null;
    }
    public final Spliterator<Map.Entry<K,V>> spliterator() {
        int characteristics =
            Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT;
        return Spliterators.spliterator(this, characteristics);
    }
    public final void forEach(Consumer<? super Map.Entry<K,V>> action) {
        Objects.requireNonNull(action);
        int expectedModCount = modCount;
        for (LinkedHashMap.Entry<K,V> e = head; e != null; e = e.after)
            action.accept(e);
        if (modCount != expectedModCount)
            throw new ConcurrentModificationException();
    }
}
// Map overrides
// Applies action to each mapping in linked-list order, failing fast if the
// map is structurally modified during traversal.
public void forEach(BiConsumer<? super K, ? super V> action) {
    Objects.requireNonNull(action);
    int expectedModCount = modCount;
    for (LinkedHashMap.Entry<K,V> e = head; e != null; e = e.after)
        action.accept(e.key, e.value);
    if (modCount != expectedModCount)
        throw new ConcurrentModificationException();
}
// Replaces each value with function(key, value), traversing in linked-list
// order and failing fast on structural modification.
public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
    Objects.requireNonNull(function);
    int expectedModCount = modCount;
    for (LinkedHashMap.Entry<K,V> e = head; e != null; e = e.after)
        e.value = function.apply(e.key, e.value);
    if (modCount != expectedModCount)
        throw new ConcurrentModificationException();
}
// Iterators
// Common fail-fast machinery for the linked key/value/entry iterators;
// subclasses supply next() in terms of nextNode().
abstract class LinkedHashIterator {
    LinkedHashMap.Entry<K,V> next;     // next entry to return
    LinkedHashMap.Entry<K,V> current;  // last entry returned; null after remove()
    int expectedModCount;              // for fail-fast detection
    LinkedHashIterator() {
        next = head;
        expectedModCount = modCount;
        current = null;
    }
    public final boolean hasNext() {
        return next != null;
    }
    // Advances to the next entry, failing fast on concurrent modification
    // or exhaustion.
    final LinkedHashMap.Entry<K,V> nextNode() {
        LinkedHashMap.Entry<K,V> e = next;
        if (modCount != expectedModCount)
            throw new ConcurrentModificationException();
        if (e == null)
            throw new NoSuchElementException();
        current = e;
        next = e.after;
        return e;
    }
    public final void remove() {
        Node<K,V> p = current;
        if (p == null)
            throw new IllegalStateException();
        if (modCount != expectedModCount)
            throw new ConcurrentModificationException();
        current = null;
        K key = p.key;
        removeNode(hash(key), key, null, false, false);
        expectedModCount = modCount;  // our own removal is not a violation
    }
}
// Iterator over keys, in linked-list order.
final class LinkedKeyIterator extends LinkedHashIterator
    implements Iterator<K> {
    public final K next() { return nextNode().getKey(); }
}
// Iterator over values, in linked-list order.
final class LinkedValueIterator extends LinkedHashIterator
    implements Iterator<V> {
    public final V next() { return nextNode().value; }
}
// Iterator over entries, in linked-list order.
final class LinkedEntryIterator extends LinkedHashIterator
    implements Iterator<Map.Entry<K,V>> {
    public final Map.Entry<K,V> next() { return nextNode(); }
}
}

View File

@ -50,9 +50,9 @@ import static java.util.Arrays.*;
"java.util.HashMap$EntryIterator",
"java.util.HashMap$KeyIterator",
"java.util.HashMap$ValueIterator",
"java.util.LinkedHashMap$EntryIterator",
"java.util.LinkedHashMap$KeyIterator",
"java.util.LinkedHashMap$ValueIterator"})
"java.util.LinkedHashMap$LinkedEntryIterator",
"java.util.LinkedHashMap$LinkedKeyIterator",
"java.util.LinkedHashMap$LinkedValueIterator"})
public class Probe {
public static void main (String... args) throws Throwable {
Classes classesAnnotation = (Probe.class).getAnnotation(Classes.class);

View File

@ -53,8 +53,6 @@ public class CheckRandomHashSeed {
throw new Error("Error in test setup: " + (expectRandom ? "" : "not " ) + "expecting random hashSeed, but " + PROP_NAME + " is " + (propSet ? "" : "not ") + "enabled");
}
testMap(new HashMap());
testMap(new LinkedHashMap());
testMap(new WeakHashMap());
testMap(new Hashtable());
}

View File

@ -25,7 +25,6 @@
* @test
* @bug 8005698
* @run main InPlaceOpsCollisions -shortrun
* @run main/othervm -Djdk.map.randomseed=true InPlaceOpsCollisions -shortrun
* @summary Ensure overrides of in-place operations in Maps behave well with lots of collisions.
* @author Brent Christian
*/

View File

@ -0,0 +1,240 @@
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.testng.Assert.assertEquals;
/*
* @test
* @bug 8023463
* @summary Test the cases where a bin is treeified and where a tree bin is converted back to a plain bin
* @run testng MapBinToFromTreeTest
*/
@Test
public class MapBinToFromTreeTest {
    // Initial capacity of map
    // Should be >= the map capacity for treeifying, see HashMap/ConcurrentMap.MIN_TREEIFY_CAPACITY
    static final int INITIAL_CAPACITY = 64;

    // Maximum size of map
    // Should be > the treeify threshold, see HashMap/ConcurrentMap.TREEIFY_THRESHOLD
    // Should be > INITIAL_CAPACITY to ensure resize occurs
    static final int SIZE = 256;

    // Load factor of map
    // A value of 1.0 ensures that the new threshold == capacity
    static final float LOAD_FACTOR = 1.0f;

    @DataProvider(name = "maps")
    static Object[][] mapProvider() {
        return new Object[][] {
            // Pass in the class name as a description for test reporting
            // purposes (type arguments added to avoid raw-type warnings)
            { HashMap.class.getName(),
              new HashMap<HashCodeInteger, Integer>(INITIAL_CAPACITY, LOAD_FACTOR) },
            { LinkedHashMap.class.getName(),
              new LinkedHashMap<HashCodeInteger, Integer>(INITIAL_CAPACITY, LOAD_FACTOR) },
            { ConcurrentHashMap.class.getName(),
              new ConcurrentHashMap<HashCodeInteger, Integer>(INITIAL_CAPACITY, LOAD_FACTOR) },
        };
    }

    @Test(dataProvider = "maps")
    public void testPutThenGet(String d, Map<HashCodeInteger, Integer> m) {
        put(SIZE, m, (i, s) -> {
            for (int j = 0; j < s; j++) {
                assertEquals(m.get(new HashCodeInteger(j)).intValue(), j,
                             String.format("Map.get(%d)", j));
            }
        });
    }

    @Test(dataProvider = "maps")
    public void testPutThenTraverse(String d, Map<HashCodeInteger, Integer> m) {
        Collector<Integer, ?, ? extends Collection<Integer>> c = getCollector(m);
        put(SIZE, m, (i, s) -> {
            // Note that it is OK to collect to a Set (HashSet) as long as
            // integer values are used since these tests only check for
            // collisions and other tests will verify more general functionality
            Collection<Integer> actual = m.keySet().stream().map(e -> e.value).collect(c);
            Collection<Integer> expected = IntStream.range(0, s).boxed().collect(c);
            assertEquals(actual, expected, "Map.keySet()");
        });
    }

    @Test(dataProvider = "maps")
    public void testRemoveThenGet(String d, Map<HashCodeInteger, Integer> m) {
        put(SIZE, m, (i, s) -> { });
        remove(m, (i, s) -> {
            for (int j = i + 1; j < SIZE; j++) {
                assertEquals(m.get(new HashCodeInteger(j)).intValue(), j,
                             String.format("Map.get(%d)", j));
            }
        });
    }

    @Test(dataProvider = "maps")
    public void testRemoveThenTraverse(String d, Map<HashCodeInteger, Integer> m) {
        put(SIZE, m, (i, s) -> { });
        Collector<Integer, ?, ? extends Collection<Integer>> c = getCollector(m);
        remove(m, (i, s) -> {
            Collection<Integer> actual = m.keySet().stream().map(e -> e.value).collect(c);
            Collection<Integer> expected = IntStream.range(i + 1, SIZE).boxed().collect(c);
            assertEquals(actual, expected, "Map.keySet()");
        });
    }

    @Test(dataProvider = "maps")
    public void testUntreeifyOnResizeWithGet(String d, Map<HashCodeInteger, Integer> m) {
        // Fill the map with 64 entries grouped into 4 buckets
        put(INITIAL_CAPACITY, m, (i, s) -> { });
        for (int i = INITIAL_CAPACITY; i < SIZE; i++) {
            // Add further entries in the 0'th bucket so as not to disturb
            // other buckets, entries of which may be distributed and/or
            // the bucket untreeified on resize
            m.put(new HashCodeInteger(i, 0), i);
            for (int j = 0; j < INITIAL_CAPACITY; j++) {
                assertEquals(m.get(new HashCodeInteger(j)).intValue(), j,
                             String.format("Map.get(%d) < INITIAL_CAPACITY", j));
            }
            for (int j = INITIAL_CAPACITY; j <= i; j++) {
                assertEquals(m.get(new HashCodeInteger(j, 0)).intValue(), j,
                             String.format("Map.get(%d) >= INITIAL_CAPACITY", j));
            }
        }
    }

    @Test(dataProvider = "maps")
    public void testUntreeifyOnResizeWithTraverse(String d, Map<HashCodeInteger, Integer> m) {
        // Fill the map with 64 entries grouped into 4 buckets
        put(INITIAL_CAPACITY, m, (i, s) -> { });
        Collector<Integer, ?, ? extends Collection<Integer>> c = getCollector(m);
        for (int i = INITIAL_CAPACITY; i < SIZE; i++) {
            // Add further entries in the 0'th bucket so as not to disturb
            // other buckets, entries of which may be distributed and/or
            // the bucket untreeified on resize
            m.put(new HashCodeInteger(i, 0), i);
            Collection<Integer> actual = m.keySet().stream().map(e -> e.value).collect(c);
            Collection<Integer> expected = IntStream.rangeClosed(0, i).boxed().collect(c);
            assertEquals(actual, expected, "Key set");
        }
    }

    // LinkedHashMap guarantees iteration order, so compare against a List;
    // for the other maps order is unspecified and a Set comparison is used.
    Collector<Integer, ?, ? extends Collection<Integer>> getCollector(Map<?, ?> m) {
        return m instanceof LinkedHashMap
               ? Collectors.toList()
               : Collectors.toSet();
    }

    // Inserts keys 0..size-1, invoking c with (index, current map size)
    // after each put.
    void put(int size, Map<HashCodeInteger, Integer> m, BiConsumer<Integer, Integer> c) {
        for (int i = 0; i < size; i++) {
            m.put(new HashCodeInteger(i), i);
            c.accept(i, m.size());
        }
    }

    // Remove all elements thus ensuring at some point trees will be
    // converted back to bins; c is invoked with (index, current map size)
    // after each removal.
    void remove(Map<HashCodeInteger, Integer> m, BiConsumer<Integer, Integer> c) {
        int size = m.size();
        for (int i = 0; i < size; i++) {
            m.remove(new HashCodeInteger(i));
            c.accept(i, m.size());
        }
    }

    // An int key whose hash code can be forced, so tests can control which
    // bucket each key lands in.
    static final class HashCodeInteger implements Comparable<HashCodeInteger> {
        final int value;
        final int hashcode;

        HashCodeInteger(int value) {
            this(value, hash(value));
        }

        HashCodeInteger(int value, int hashcode) {
            this.value = value;
            this.hashcode = hashcode;
        }

        static int hash(int i) {
            // Assuming 64 entries with keys from 0 to 63 then a map:
            // - of capacity 64 will have 4 buckets with 16 entries per-bucket
            // - of capacity 128 will have 8 buckets with 8 entries per-bucket
            // - of capacity 256 will have 16 buckets with 4 entries per-bucket
            //
            // Re-sizing will result in re-distribution, doubling the buckets
            // and reducing the entries by half. This will result in
            // untreeifying when the number of entries is less than the
            // untreeify threshold (see HashMap/ConcurrentMap.UNTREEIFY_THRESHOLD)
            return (i % 4) + (i / 4) * INITIAL_CAPACITY;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj instanceof HashCodeInteger) {
                HashCodeInteger other = (HashCodeInteger) obj;
                return other.value == value;
            }
            return false;
        }

        @Override
        public int hashCode() {
            return hashcode;
        }

        @Override
        public int compareTo(HashCodeInteger o) {
            // Integer.compare avoids the overflow risk of "value - o.value".
            return Integer.compare(value, o.value);
        }

        @Override
        public String toString() {
            return Integer.toString(value);
        }
    }
}

View File

@ -1,255 +0,0 @@
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import java.util.*;
import java.lang.reflect.Field;
/*
* @test
* @bug 8005698
* @summary Test the case where TreeBin.splitTreeBin() converts a bin back to an Entry list
* @run main TreeBinSplitBackToEntries unused
* @author Brent Christian
*/
public class TreeBinSplitBackToEntries {
private static int EXPECTED_TREE_THRESHOLD = 16;
// Easiest if this covers one bit higher than 'bit' in splitTreeBin() on the
// call where the TreeBin is converted back to an Entry list
private static int HASHMASK = 0x7F;
private static boolean verbose = false;
private static boolean fastFail = false;
private static boolean failed = false;
// Prints msg only when the verbose flag was enabled (via -verbose).
static void printlnIfVerbose(String msg) {
    if (verbose) {System.out.println(msg); }
}
public static void main(String[] args) {
    // Parse optional flags; unknown arguments are ignored.
    for (String arg : args) {
        switch(arg) {
            case "-verbose":
                verbose = true;
                break;
            case "-fastfail":
                fastFail = true;
                break;
        }
    }
    checkTreeThreshold();
    testMapHiTree();
    testMapLoTree();
    // 'failed' is accumulated by check(); exit non-zero on any failure.
    if (failed) {
        System.out.println("Test Failed");
        System.exit(1);
    } else {
        System.out.println("Test Passed");
    }
}
// Reflectively reads HashMap.TreeBin.TREE_THRESHOLD and verifies it matches
// the constant this test was written against.
public static void checkTreeThreshold() {
    int threshold = -1;
    try {
        // Class<?> instead of raw Class eliminates an unchecked warning.
        Class<?> treeBinClass = Class.forName("java.util.HashMap$TreeBin");
        Field treeThreshold = treeBinClass.getDeclaredField("TREE_THRESHOLD");
        treeThreshold.setAccessible(true);
        // Static field: the receiver argument is ignored, so pass null.
        threshold = treeThreshold.getInt(null);
    } catch (ClassNotFoundException|NoSuchFieldException|IllegalAccessException e) {
        e.printStackTrace();
        throw new Error("Problem accessing TreeBin.TREE_THRESHOLD", e);
    }
    check("Expected TREE_THRESHOLD: " + EXPECTED_TREE_THRESHOLD +", found: " + threshold,
        threshold == EXPECTED_TREE_THRESHOLD);
    printlnIfVerbose("TREE_THRESHOLD: " + threshold);
}
public static void testMapHiTree() {
Object[][] mapKeys = makeHiTreeTestData();
testMapsForKeys(mapKeys, "hiTree");
}
public static void testMapLoTree() {
Object[][] mapKeys = makeLoTreeTestData();
testMapsForKeys(mapKeys, "loTree");
}
public static void testMapsForKeys(Object[][] mapKeys, String desc) {
// loop through data sets
for (Object[] keys_desc : mapKeys) {
Map<Object, Object>[] maps = (Map<Object, Object>[]) new Map[]{
new HashMap<>(4, 0.8f),
new LinkedHashMap<>(4, 0.8f),
};
// for each map type.
for (Map<Object, Object> map : maps) {
Object[] keys = (Object[]) keys_desc[1];
System.out.println(desc + ": testPutThenGet() for " + map.getClass());
testPutThenGet(map, keys);
}
}
}
private static <T> void testPutThenGet(Map<T, T> map, T[] keys) {
for (T key : keys) {
printlnIfVerbose("put()ing 0x" + Integer.toHexString(Integer.parseInt(key.toString())) + ", hashCode=" + Integer.toHexString(key.hashCode()));
map.put(key, key);
}
for (T key : keys) {
check("key: 0x" + Integer.toHexString(Integer.parseInt(key.toString())) + " not found in resulting " + map.getClass().getSimpleName(), map.get(key) != null);
}
}
/* Data to force a non-empty loTree in TreeBin.splitTreeBin() to be converted back
* into an Entry list
*/
private static Object[][] makeLoTreeTestData() {
HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[] {
new HashableInteger( 0x23, HASHMASK),
new HashableInteger( 0x123, HASHMASK),
new HashableInteger( 0x323, HASHMASK),
new HashableInteger( 0x523, HASHMASK),
new HashableInteger( 0x723, HASHMASK),
new HashableInteger( 0x923, HASHMASK),
new HashableInteger( 0xB23, HASHMASK),
new HashableInteger( 0xD23, HASHMASK),
new HashableInteger( 0xF23, HASHMASK),
new HashableInteger( 0xF123, HASHMASK),
new HashableInteger( 0x1023, HASHMASK),
new HashableInteger( 0x1123, HASHMASK),
new HashableInteger( 0x1323, HASHMASK),
new HashableInteger( 0x1523, HASHMASK),
new HashableInteger( 0x1723, HASHMASK),
new HashableInteger( 0x1923, HASHMASK),
new HashableInteger( 0x1B23, HASHMASK),
new HashableInteger( 0x1D23, HASHMASK),
new HashableInteger( 0x3123, HASHMASK),
new HashableInteger( 0x3323, HASHMASK),
new HashableInteger( 0x3523, HASHMASK),
new HashableInteger( 0x3723, HASHMASK),
new HashableInteger( 0x1001, HASHMASK),
new HashableInteger( 0x4001, HASHMASK),
new HashableInteger( 0x1, HASHMASK),
};
return new Object[][] {
new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
};
}
/* Data to force the hiTree in TreeBin.splitTreeBin() to be converted back
* into an Entry list
*/
private static Object[][] makeHiTreeTestData() {
HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[] {
new HashableInteger( 0x1, HASHMASK),
new HashableInteger( 0x101, HASHMASK),
new HashableInteger( 0x301, HASHMASK),
new HashableInteger( 0x501, HASHMASK),
new HashableInteger( 0x701, HASHMASK),
new HashableInteger( 0x1001, HASHMASK),
new HashableInteger( 0x1101, HASHMASK),
new HashableInteger( 0x1301, HASHMASK),
new HashableInteger( 0x1501, HASHMASK),
new HashableInteger( 0x1701, HASHMASK),
new HashableInteger( 0x4001, HASHMASK),
new HashableInteger( 0x4101, HASHMASK),
new HashableInteger( 0x4301, HASHMASK),
new HashableInteger( 0x4501, HASHMASK),
new HashableInteger( 0x4701, HASHMASK),
new HashableInteger( 0x8001, HASHMASK),
new HashableInteger( 0x8101, HASHMASK),
new HashableInteger( 0x8301, HASHMASK),
new HashableInteger( 0x8501, HASHMASK),
new HashableInteger( 0x8701, HASHMASK),
new HashableInteger( 0x9001, HASHMASK),
new HashableInteger( 0x23, HASHMASK),
new HashableInteger( 0x123, HASHMASK),
new HashableInteger( 0x323, HASHMASK),
new HashableInteger( 0x523, HASHMASK),
};
return new Object[][] {
new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
};
}
static void check(String desc, boolean cond) {
if (!cond) {
fail(desc);
}
}
static void fail(String msg) {
failed = true;
(new Error("Failure: " + msg)).printStackTrace(System.err);
if (fastFail) {
System.exit(1);
}
}
final static class HashableInteger implements Comparable<HashableInteger> {
final int value;
final int hashmask; //yes duplication
HashableInteger(int value, int hashmask) {
this.value = value;
this.hashmask = hashmask;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof HashableInteger) {
HashableInteger other = (HashableInteger) obj;
return other.value == value;
}
return false;
}
@Override
public int hashCode() {
// This version ANDs the mask
return value & hashmask;
}
@Override
public int compareTo(HashableInteger o) {
return value - o.value;
}
@Override
public String toString() {
return Integer.toString(value);
}
}
}

View File

@ -23,7 +23,7 @@
/**
* @test
* @bug 8020156 8020009 8022326
* @bug 8020156 8020009 8022326 8012913
* @run testng SpliteratorCharacteristics
*/
@ -32,6 +32,10 @@ import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
@ -47,7 +51,27 @@ import static org.testng.Assert.*;
@Test
public class SpliteratorCharacteristics {
// TreeMap
public void testHashMap() {
assertMapCharacteristics(new HashMap<>(),
Spliterator.SIZED | Spliterator.DISTINCT);
}
public void testHashSet() {
assertSetCharacteristics(new HashSet<>(),
Spliterator.SIZED | Spliterator.DISTINCT);
}
public void testLinkedHashMap() {
assertMapCharacteristics(new LinkedHashMap<>(),
Spliterator.SIZED | Spliterator.DISTINCT |
Spliterator.ORDERED);
}
public void testLinkedHashSet() {
assertSetCharacteristics(new LinkedHashSet<>(),
Spliterator.SIZED | Spliterator.DISTINCT |
Spliterator.ORDERED);
}
public void testTreeMap() {
assertSortedMapCharacteristics(new TreeMap<>(),
@ -61,9 +85,6 @@ public class SpliteratorCharacteristics {
Spliterator.SORTED | Spliterator.ORDERED);
}
// TreeSet
public void testTreeSet() {
assertSortedSetCharacteristics(new TreeSet<>(),
Spliterator.SIZED | Spliterator.DISTINCT |
@ -76,9 +97,6 @@ public class SpliteratorCharacteristics {
Spliterator.SORTED | Spliterator.ORDERED);
}
// ConcurrentSkipListMap
public void testConcurrentSkipListMap() {
assertSortedMapCharacteristics(new ConcurrentSkipListMap<>(),
Spliterator.CONCURRENT | Spliterator.NONNULL |
@ -93,9 +111,6 @@ public class SpliteratorCharacteristics {
Spliterator.ORDERED);
}
// ConcurrentSkipListSet
public void testConcurrentSkipListSet() {
assertSortedSetCharacteristics(new ConcurrentSkipListSet<>(),
Spliterator.CONCURRENT | Spliterator.NONNULL |
@ -113,35 +128,58 @@ public class SpliteratorCharacteristics {
//
void assertSortedMapCharacteristics(SortedMap<Integer, String> m, int keyCharacteristics) {
void assertMapCharacteristics(Map<Integer, String> m, int keyCharacteristics) {
assertMapCharacteristics(m, keyCharacteristics, 0);
}
void assertMapCharacteristics(Map<Integer, String> m, int keyCharacteristics, int notValueCharacteristics) {
initMap(m);
boolean hasComparator = m.comparator() != null;
assertCharacteristics(m.keySet(), keyCharacteristics);
assertCharacteristics(m.values(),
keyCharacteristics & ~(Spliterator.DISTINCT | notValueCharacteristics));
assertCharacteristics(m.entrySet(), keyCharacteristics);
if ((keyCharacteristics & Spliterator.SORTED) == 0) {
assertISEComparator(m.keySet());
assertISEComparator(m.values());
assertISEComparator(m.entrySet());
}
}
void assertSetCharacteristics(Set<Integer> s, int keyCharacteristics) {
initSet(s);
assertCharacteristics(s, keyCharacteristics);
if ((keyCharacteristics & Spliterator.SORTED) == 0) {
assertISEComparator(s);
}
}
void assertSortedMapCharacteristics(SortedMap<Integer, String> m, int keyCharacteristics) {
assertMapCharacteristics(m, keyCharacteristics, Spliterator.SORTED);
Set<Integer> keys = m.keySet();
assertCharacteristics(keys, keyCharacteristics);
if (hasComparator) {
if (m.comparator() != null) {
assertNotNullComparator(keys);
}
else {
assertNullComparator(keys);
}
assertCharacteristics(m.values(),
keyCharacteristics & ~(Spliterator.DISTINCT | Spliterator.SORTED));
assertISEComparator(m.values());
assertCharacteristics(m.entrySet(), keyCharacteristics);
assertNotNullComparator(m.entrySet());
}
void assertSortedSetCharacteristics(SortedSet<Integer> s, int keyCharacteristics) {
initSet(s);
assertSetCharacteristics(s, keyCharacteristics);
boolean hasComparator = s.comparator() != null;
assertCharacteristics(s, keyCharacteristics);
if (hasComparator) {
if (s.comparator() != null) {
assertNotNullComparator(s);
}
else {
@ -161,27 +199,18 @@ public class SpliteratorCharacteristics {
}
void assertCharacteristics(Collection<?> c, int expectedCharacteristics) {
assertCharacteristics(c.spliterator(), expectedCharacteristics);
}
void assertCharacteristics(Spliterator<?> s, int expectedCharacteristics) {
assertTrue(s.hasCharacteristics(expectedCharacteristics));
assertTrue(c.spliterator().hasCharacteristics(expectedCharacteristics),
"Spliterator characteristics");
}
void assertNullComparator(Collection<?> c) {
assertNullComparator(c.spliterator());
}
void assertNullComparator(Spliterator<?> s) {
assertNull(s.getComparator());
assertNull(c.spliterator().getComparator(),
"Comparator of Spliterator of Collection");
}
void assertNotNullComparator(Collection<?> c) {
assertNotNullComparator(c.spliterator());
}
void assertNotNullComparator(Spliterator<?> s) {
assertNotNull(s.getComparator());
assertNotNull(c.spliterator().getComparator(),
"Comparator of Spliterator of Collection");
}
void assertISEComparator(Collection<?> c) {
@ -196,6 +225,6 @@ public class SpliteratorCharacteristics {
catch (IllegalStateException e) {
caught = true;
}
assertTrue(caught);
assertTrue(caught, "Throwing IllegalStateException");
}
}