8005698: Handle Frequent HashMap Collisions with Balanced Trees

HashMap bins with many collisions store entries in balanced trees.
Reviewed-by: alanb, dl, mduigou

commit 53a90d216f
parent 8452360411
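This changeset teaches HashMap (and LinkedHashMap) to store the entries of a heavily colliding bin in a balanced tree, so lookups in such a bin degrade to roughly O(log n) rather than a linear scan. The tree-ification is internal to java.util.HashMap; the following minimal sketch only demonstrates the collision scenario the change targets (a key type whose hashCode always collides), not the new implementation itself. Class and method names in the sketch are illustrative.

import java.util.HashMap;
import java.util.Map;

public class CollidingKeysDemo {

    // Every instance reports the same hashCode, so all keys land in one bin.
    // Implementing Comparable lets a tree-ified bin order keys instead of
    // relying on a linear chain of equals() comparisons.
    static final class Key implements Comparable<Key> {
        final int id;
        Key(int id) { this.id = id; }

        @Override public int hashCode() { return 42; }          // force collisions
        @Override public boolean equals(Object o) {
            return (o instanceof Key) && ((Key) o).id == id;
        }
        @Override public int compareTo(Key other) {
            return Integer.compare(id, other.id);
        }
    }

    public static void main(String[] args) {
        Map<Key, Integer> map = new HashMap<>();
        for (int i = 0; i < 10_000; i++) {
            map.put(new Key(i), i);                              // all in one bucket
        }
        long sum = 0;
        for (int i = 0; i < 10_000; i++) {
            sum += map.get(new Key(i));                          // fast despite collisions
        }
        System.out.println("sum = " + sum);
    }
}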
@@ -180,13 +180,27 @@ public class Hashtable<K,V>
         */
        static final long HASHSEED_OFFSET;

        static final boolean USE_HASHSEED;

        static {
            try {
                UNSAFE = sun.misc.Unsafe.getUnsafe();
                HASHSEED_OFFSET = UNSAFE.objectFieldOffset(
                    Hashtable.class.getDeclaredField("hashSeed"));
            } catch (NoSuchFieldException | SecurityException e) {
                throw new InternalError("Failed to record hashSeed offset", e);
            String hashSeedProp = java.security.AccessController.doPrivileged(
                    new sun.security.action.GetPropertyAction(
                        "jdk.map.useRandomSeed"));
            boolean localBool = (null != hashSeedProp)
                    ? Boolean.parseBoolean(hashSeedProp) : false;
            USE_HASHSEED = localBool;

            if (USE_HASHSEED) {
                try {
                    UNSAFE = sun.misc.Unsafe.getUnsafe();
                    HASHSEED_OFFSET = UNSAFE.objectFieldOffset(
                        Hashtable.class.getDeclaredField("hashSeed"));
                } catch (NoSuchFieldException | SecurityException e) {
                    throw new InternalError("Failed to record hashSeed offset", e);
                }
            } else {
                UNSAFE = null;
                HASHSEED_OFFSET = 0;
            }
        }
    }
@@ -194,21 +208,24 @@ public class Hashtable<K,V>
    /**
     * A randomizing value associated with this instance that is applied to
     * hash code of keys to make hash collisions harder to find.
     *
     * Non-final so it can be set lazily, but be sure not to set more than once.
     */
    transient final int hashSeed = sun.misc.Hashing.randomHashSeed(this);
    transient final int hashSeed;

    /**
     * Return an initial value for the hashSeed, or 0 if the random seed is not
     * enabled.
     */
    final int initHashSeed() {
        if (sun.misc.VM.isBooted() && Holder.USE_HASHSEED) {
            return sun.misc.Hashing.randomHashSeed(this);
        }
        return 0;
    }

    private int hash(Object k) {
        if (k instanceof String) {
            return ((String)k).hash32();
        }

        int h = hashSeed ^ k.hashCode();

        // This function ensures that hashCodes that differ only by
        // constant multiples at each bit position have a bounded
        // number of collisions (approximately 8 at default load factor).
        h ^= (h >>> 20) ^ (h >>> 12);
        return h ^ (h >>> 7) ^ (h >>> 4);
        return hashSeed ^ k.hashCode();
    }

    /**
@@ -232,6 +249,7 @@ public class Hashtable<K,V>
        this.loadFactor = loadFactor;
        table = new Entry<?,?>[initialCapacity];
        threshold = (int)Math.min(initialCapacity * loadFactor, MAX_ARRAY_SIZE + 1);
        hashSeed = initHashSeed();
    }

    /**
@@ -1187,8 +1205,10 @@ public class Hashtable<K,V>
        s.defaultReadObject();

        // set hashMask
        Holder.UNSAFE.putIntVolatile(this, Holder.HASHSEED_OFFSET,
                sun.misc.Hashing.randomHashSeed(this));
        if (Holder.USE_HASHSEED) {
            Holder.UNSAFE.putIntVolatile(this, Holder.HASHSEED_OFFSET,
                    sun.misc.Hashing.randomHashSeed(this));
        }

        // Read the original length of the array and number of elements
        int origlength = s.readInt();
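The Hashtable changes above gate the per-instance random hash seed behind the jdk.map.useRandomSeed system property, read lazily inside a nested Holder class. A minimal sketch of that initialization-on-demand idiom follows; the class name, property name, and seed source here are illustrative stand-ins, not the JDK's exact code.

public class LazyConfig {

    // The nested Holder is not class-loaded (and the property not read)
    // until the first time one of its fields is referenced.
    private static class Holder {
        static final boolean USE_FEATURE;
        static {
            String prop = System.getProperty("demo.useFeature");   // hypothetical property
            USE_FEATURE = (prop != null) && Boolean.parseBoolean(prop);
        }
    }

    int pickSeed() {
        // Mirrors how Hashtable defers reading jdk.map.useRandomSeed until
        // the map machinery actually needs a seed.
        return Holder.USE_FEATURE ? new java.util.Random().nextInt() : 0;
    }
}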
@@ -55,9 +55,9 @@ import java.io.*;
 * order they were presented.)
 *
 * <p>A special {@link #LinkedHashMap(int,float,boolean) constructor} is
 * provided to create a linked hash map whose order of iteration is the order
 * in which its entries were last accessed, from least-recently accessed to
 * most-recently (<i>access-order</i>). This kind of map is well-suited to
 * provided to create a <tt>LinkedHashMap</tt> whose order of iteration is the
 * order in which its entries were last accessed, from least-recently accessed
 * to most-recently (<i>access-order</i>). This kind of map is well-suited to
 * building LRU caches. Invoking the <tt>put</tt> or <tt>get</tt> method
 * results in an access to the corresponding entry (assuming it exists after
 * the invocation completes). The <tt>putAll</tt> method generates one entry
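The javadoc above describes the access-ordered constructor and its use for LRU caches. A short sketch of such a cache, built on the public LinkedHashMap API (accessOrder = true plus removeEldestEntry), is shown here; the capacity and class name are illustrative.

import java.util.LinkedHashMap;
import java.util.Map;

class LruCache<K, V> extends LinkedHashMap<K, V> {
    private final int maxEntries;

    LruCache(int maxEntries) {
        super(16, 0.75f, true);          // true => access order: get/put reorders entries
        this.maxEntries = maxEntries;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > maxEntries;      // evict the least-recently-accessed entry
    }
}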
@@ -242,23 +242,6 @@ public class LinkedHashMap<K,V>
        header.before = header.after = header;
    }

    /**
     * Transfers all entries to new table array. This method is called
     * by superclass resize. It is overridden for performance, as it is
     * faster to iterate using our linked list.
     */
    @Override
    @SuppressWarnings("unchecked")
    void transfer(HashMap.Entry[] newTable) {
        int newCapacity = newTable.length;
        for (Entry<K,V> e = header.after; e != header; e = e.after) {
            int index = indexFor(e.hash, newCapacity);
            e.next = (HashMap.Entry<K,V>)newTable[index];
            newTable[index] = e;
        }
    }

    /**
     * Returns <tt>true</tt> if this map maps one or more keys to the
     * specified value.
@@ -320,7 +303,7 @@ public class LinkedHashMap<K,V>
        // These fields comprise the doubly linked list used for iteration.
        Entry<K,V> before, after;

        Entry(int hash, K key, V value, HashMap.Entry<K,V> next) {
        Entry(int hash, K key, V value, Object next) {
            super(hash, key, value, next);
        }

@@ -344,7 +327,7 @@ public class LinkedHashMap<K,V>

        /**
         * This method is invoked by the superclass whenever the value
         * of a pre-existing entry is read by Map.get or modified by Map.set.
         * of a pre-existing entry is read by Map.get or modified by Map.put.
         * If the enclosing Map is access-ordered, it moves the entry
         * to the end of the list; otherwise, it does nothing.
         */
@@ -422,8 +405,9 @@ public class LinkedHashMap<K,V>
     * allocated entry to get inserted at the end of the linked list and
     * removes the eldest entry if appropriate.
     */
    void addEntry(int hash, K key, V value, int bucketIndex) {
        super.addEntry(hash, key, value, bucketIndex);
    @Override
    void addEntry(int hash, K key, V value, int bucketIndex, boolean checkIfNeedTree) {
        super.addEntry(hash, key, value, bucketIndex, checkIfNeedTree);

        // Remove eldest entry if instructed
        Entry<K,V> eldest = header.after;
@@ -432,17 +416,14 @@ public class LinkedHashMap<K,V>
        }
    }

    /**
     * This override differs from addEntry in that it doesn't resize the
     * table or remove the eldest entry.
    /*
     * Create a new LinkedHashMap.Entry and setup the before/after pointers
     */
    void createEntry(int hash, K key, V value, int bucketIndex) {
        @SuppressWarnings("unchecked")
        HashMap.Entry<K,V> old = (HashMap.Entry<K,V>)table[bucketIndex];
        Entry<K,V> e = new Entry<>(hash, key, value, old);
        table[bucketIndex] = e;
        e.addBefore(header);
        size++;
    @Override
    HashMap.Entry<K,V> newEntry(int hash, K key, V value, Object next) {
        Entry<K,V> newEntry = new Entry<>(hash, key, value, next);
        newEntry.addBefore(header);
        return newEntry;
    }

    /**
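The new newEntry factory above links each freshly created entry into the doubly linked list via addBefore(header). The following is only a sketch of what such a splice is expected to do, given the before/after fields shown in the diff; it is an illustration of the mechanism, not the JDK's exact code.

final class Node<T> {
    T value;
    Node<T> before, after;

    Node(T value) { this.value = value; }

    // Insert this node immediately before 'existing' (typically the header),
    // i.e. at the tail of the circular list.
    void addBefore(Node<T> existing) {
        this.after = existing;
        this.before = existing.before;
        this.before.after = this;
        this.after.before = this;
    }
}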
@@ -187,11 +187,37 @@ public class WeakHashMap<K,V>
     */
    int modCount;

    private static class Holder {
        static final boolean USE_HASHSEED;

        static {
            String hashSeedProp = java.security.AccessController.doPrivileged(
                    new sun.security.action.GetPropertyAction(
                        "jdk.map.useRandomSeed"));
            boolean localBool = (null != hashSeedProp)
                    ? Boolean.parseBoolean(hashSeedProp) : false;
            USE_HASHSEED = localBool;
        }
    }

    /**
     * A randomizing value associated with this instance that is applied to
     * hash code of keys to make hash collisions harder to find.
     *
     * Non-final so it can be set lazily, but be sure not to set more than once.
     */
    transient final int hashSeed = sun.misc.Hashing.randomHashSeed(this);
    transient int hashSeed;

    /**
     * Initialize the hashing mask value.
     */
    final void initHashSeed() {
        if (sun.misc.VM.isBooted() && Holder.USE_HASHSEED) {
            // Do not set hashSeed more than once!
            // assert hashSeed == 0;
            hashSeed = sun.misc.Hashing.randomHashSeed(this);
        }
    }

    @SuppressWarnings("unchecked")
    private Entry<K,V>[] newTable(int n) {
@@ -223,6 +249,7 @@ public class WeakHashMap<K,V>
        table = newTable(capacity);
        this.loadFactor = loadFactor;
        threshold = (int)(capacity * loadFactor);
        initHashSeed();
    }

    /**
@@ -298,10 +325,7 @@ public class WeakHashMap<K,V>
     * in lower bits.
     */
    final int hash(Object k) {
        if (k instanceof String) {
            return ((String) k).hash32();
        }
        int h = hashSeed ^ k.hashCode();
        int h = hashSeed ^ k.hashCode();

        // This function ensures that hashCodes that differ only by
        // constant multiples at each bit position have a bounded
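The hash() methods above XOR the per-instance seed into the key's hashCode and then spread the bits before the table index is taken. A small self-contained sketch of that spreading step plus the usual power-of-two bucket mask (the indexFor form shown in the LinkedHashMap.transfer diff) follows; it is illustrative, not a quotation of the JDK's exact code.

final class HashSpread {
    static int spread(int seed, int hashCode) {
        int h = seed ^ hashCode;
        // Bounds collisions for hashCodes that differ only in high bits.
        h ^= (h >>> 20) ^ (h >>> 12);
        return h ^ (h >>> 7) ^ (h >>> 4);
    }

    static int indexFor(int h, int tableLength) {   // tableLength is a power of two
        return h & (tableLength - 1);
    }

    public static void main(String[] args) {
        int h = spread(0, "example".hashCode());
        System.out.println("bucket = " + indexFor(h, 16));
    }
}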
@@ -24,7 +24,7 @@
 */
package sun.misc;

import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Hashing utilities.
@@ -207,28 +207,16 @@ public class Hashing {
    }

    /**
     * Holds references to things that can't be initialized until after VM
     * is fully booted.
     * Return a non-zero 32-bit pseudo random value. The {@code instance} object
     * may be used as part of the value.
     *
     * @param instance an object to use if desired in choosing value.
     * @return a non-zero 32-bit pseudo random value.
     */
    private static class Holder {

        /**
         * Used for generating per-instance hash seeds.
         *
         * We try to improve upon the default seeding.
         */
        static final Random SEED_MAKER = new Random(
            Double.doubleToRawLongBits(Math.random())
            ^ System.identityHashCode(Hashing.class)
            ^ System.currentTimeMillis()
            ^ System.nanoTime()
            ^ Runtime.getRuntime().freeMemory());
    }

    public static int randomHashSeed(Object instance) {
        int seed;
        if (sun.misc.VM.isBooted()) {
            seed = Holder.SEED_MAKER.nextInt();
            seed = ThreadLocalRandom.current().nextInt();
        } else {
            // lower quality "random" seed value--still better than zero and not
            // not practically reversible.
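The Hashing change above drops the shared, heavily seeded Random in favor of ThreadLocalRandom once the VM is booted. A hedged sketch of that seeding pattern follows; the pre-boot fallback and the vmBooted() stand-in are placeholders, not the JDK's actual code.

import java.util.concurrent.ThreadLocalRandom;

final class SeedSource {
    static int randomSeed(Object instance) {
        if (vmBooted()) {
            // Per-thread generator: no contention on a shared Random instance.
            return ThreadLocalRandom.current().nextInt();
        }
        // Lower-quality fallback for very early startup (illustrative only).
        return System.identityHashCode(instance) ^ (int) System.nanoTime();
    }

    private static boolean vmBooted() {
        return true; // stand-in for sun.misc.VM.isBooted()
    }
}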
jdk/test/java/util/Map/CheckRandomHashSeed.java (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
/*
|
||||
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @test
|
||||
* @bug 8005698
|
||||
* @summary Check operation of jdk.map.useRandomSeed property
|
||||
* @run main CheckRandomHashSeed
|
||||
* @run main/othervm -Djdk.map.useRandomSeed=false CheckRandomHashSeed
|
||||
* @run main/othervm -Djdk.map.useRandomSeed=bogus CheckRandomHashSeed
|
||||
* @run main/othervm -Djdk.map.useRandomSeed=true CheckRandomHashSeed true
|
||||
* @author Brent Christian
|
||||
*/
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Map;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Hashtable;
|
||||
import java.util.WeakHashMap;
|
||||
|
||||
public class CheckRandomHashSeed {
|
||||
private final static String PROP_NAME = "jdk.map.useRandomSeed";
|
||||
static boolean expectRandom = false;
|
||||
|
||||
public static void main(String[] args) {
|
||||
if (args.length > 0 && args[0].equals("true")) {
|
||||
expectRandom = true;
|
||||
}
|
||||
String hashSeedProp = System.getProperty(PROP_NAME);
|
||||
boolean propSet = (null != hashSeedProp)
|
||||
? Boolean.parseBoolean(hashSeedProp) : false;
|
||||
if (expectRandom != propSet) {
|
||||
throw new Error("Error in test setup: " + (expectRandom ? "" : "not " ) + "expecting random hashSeed, but " + PROP_NAME + " is " + (propSet ? "" : "not ") + "enabled");
|
||||
}
|
||||
|
||||
testMap(new HashMap());
|
||||
testMap(new LinkedHashMap());
|
||||
testMap(new WeakHashMap());
|
||||
testMap(new Hashtable());
|
||||
}
|
||||
|
||||
private static void testMap(Map map) {
|
||||
int hashSeed = getHashSeed(map);
|
||||
boolean hashSeedIsZero = (hashSeed == 0);
|
||||
|
||||
if (expectRandom != hashSeedIsZero) {
|
||||
System.out.println("Test passed for " + map.getClass().getSimpleName() + " - expectRandom: " + expectRandom + ", hashSeed: " + hashSeed);
|
||||
} else {
|
||||
throw new Error ("Test FAILED for " + map.getClass().getSimpleName() + " - expectRandom: " + expectRandom + ", hashSeed: " + hashSeed);
|
||||
}
|
||||
}
|
||||
|
||||
private static int getHashSeed(Map map) {
|
||||
try {
|
||||
if (map instanceof HashMap || map instanceof LinkedHashMap) {
|
||||
map.put("Key", "Value");
|
||||
Field hashSeedField = HashMap.class.getDeclaredField("hashSeed");
|
||||
hashSeedField.setAccessible(true);
|
||||
int hashSeed = hashSeedField.getInt(map);
|
||||
return hashSeed;
|
||||
} else {
|
||||
map.put("Key", "Value");
|
||||
Field hashSeedField = map.getClass().getDeclaredField("hashSeed");
|
||||
hashSeedField.setAccessible(true);
|
||||
int hashSeed = hashSeedField.getInt(map);
|
||||
return hashSeed;
|
||||
}
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace();
|
||||
throw new Error(e);
|
||||
}
|
||||
}
|
||||
}
|
@@ -26,6 +26,7 @@
 * @bug 7126277
 * @run main Collisions -shortrun
 * @run main/othervm -Djdk.map.althashing.threshold=0 Collisions -shortrun
 * @run main/othervm -Djdk.map.useRandomSeed=true Collisions -shortrun
 * @summary Ensure Maps behave well with lots of hashCode() collisions.
 * @author Mike Duigou
 */
jdk/test/java/util/Map/InPlaceOpsCollisions.java (new file, 665 lines)
@@ -0,0 +1,665 @@
|
||||
/*
|
||||
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @bug 8005698
|
||||
* @run main InPlaceOpsCollisions -shortrun
|
||||
* @run main/othervm -Djdk.map.randomseed=true InPlaceOpsCollisions -shortrun
|
||||
* @summary Ensure overrides of in-place operations in Maps behave well with lots of collisions.
|
||||
* @author Brent Christian
|
||||
*/
|
||||
import java.util.*;
|
||||
import java.util.function.*;
|
||||
|
||||
public class InPlaceOpsCollisions {
|
||||
|
||||
/**
|
||||
* Number of elements per map.
|
||||
*/
|
||||
private static final int TEST_SIZE = 5000;
|
||||
|
||||
final static class HashableInteger implements Comparable<HashableInteger> {
|
||||
|
||||
final int value;
|
||||
final int hashmask; //yes duplication
|
||||
|
||||
HashableInteger(int value, int hashmask) {
|
||||
this.value = value;
|
||||
this.hashmask = hashmask;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj instanceof HashableInteger) {
|
||||
HashableInteger other = (HashableInteger) obj;
|
||||
|
||||
return other.value == value;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return value % hashmask;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(HashableInteger o) {
|
||||
return value - o.value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Integer.toString(value);
|
||||
}
|
||||
}
|
||||
|
||||
static HashableInteger EXTRA_INT_VAL;
|
||||
static String EXTRA_STRING_VAL;
|
||||
|
||||
private static Object[][] makeTestData(int size) {
|
||||
HashableInteger UNIQUE_OBJECTS[] = new HashableInteger[size];
|
||||
HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[size];
|
||||
String UNIQUE_STRINGS[] = new String[size];
|
||||
String COLLIDING_STRINGS[] = new String[size];
|
||||
|
||||
for (int i = 0; i < size; i++) {
|
||||
UNIQUE_OBJECTS[i] = new HashableInteger(i, Integer.MAX_VALUE);
|
||||
COLLIDING_OBJECTS[i] = new HashableInteger(i, 10);
|
||||
UNIQUE_STRINGS[i] = unhash(i);
|
||||
COLLIDING_STRINGS[i] = (0 == i % 2)
|
||||
? UNIQUE_STRINGS[i / 2]
|
||||
: "\u0000\u0000\u0000\u0000\u0000" + COLLIDING_STRINGS[i - 1];
|
||||
}
|
||||
EXTRA_INT_VAL = new HashableInteger(size, Integer.MAX_VALUE);
|
||||
EXTRA_STRING_VAL = new String ("Extra Value");
|
||||
|
||||
return new Object[][] {
|
||||
new Object[]{"Unique Objects", UNIQUE_OBJECTS},
|
||||
new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
|
||||
new Object[]{"Unique Strings", UNIQUE_STRINGS},
|
||||
new Object[]{"Colliding Strings", COLLIDING_STRINGS}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a string with a hash equal to the argument.
|
||||
*
|
||||
* @return string with a hash equal to the argument.
|
||||
*/
|
||||
public static String unhash(int target) {
|
||||
StringBuilder answer = new StringBuilder();
|
||||
if (target < 0) {
|
||||
// String with hash of Integer.MIN_VALUE, 0x80000000
|
||||
answer.append("\\u0915\\u0009\\u001e\\u000c\\u0002");
|
||||
|
||||
if (target == Integer.MIN_VALUE) {
|
||||
return answer.toString();
|
||||
}
|
||||
// Find target without sign bit set
|
||||
target = target & Integer.MAX_VALUE;
|
||||
}
|
||||
|
||||
unhash0(answer, target);
|
||||
return answer.toString();
|
||||
}
|
||||
|
||||
private static void unhash0(StringBuilder partial, int target) {
|
||||
int div = target / 31;
|
||||
int rem = target % 31;
|
||||
|
||||
if (div <= Character.MAX_VALUE) {
|
||||
if (div != 0) {
|
||||
partial.append((char) div);
|
||||
}
|
||||
partial.append((char) rem);
|
||||
} else {
|
||||
unhash0(partial, div);
|
||||
partial.append((char) rem);
|
||||
}
|
||||
}
|
||||
|
||||
private static void realMain(String[] args) throws Throwable {
|
||||
boolean shortRun = args.length > 0 && args[0].equals("-shortrun");
|
||||
|
||||
Object[][] mapKeys = makeTestData(shortRun ? (TEST_SIZE / 2) : TEST_SIZE);
|
||||
|
||||
// loop through data sets
|
||||
for (Object[] keys_desc : mapKeys) {
|
||||
Map<Object, Object>[] maps = (Map<Object, Object>[]) new Map[]{
|
||||
new HashMap<>(),
|
||||
new LinkedHashMap<>(),
|
||||
};
|
||||
|
||||
// for each map type.
|
||||
for (Map<Object, Object> map : maps) {
|
||||
String desc = (String) keys_desc[0];
|
||||
Object[] keys = (Object[]) keys_desc[1];
|
||||
try {
|
||||
testInPlaceOps(map, desc, keys);
|
||||
} catch(Exception all) {
|
||||
unexpected("Failed for " + map.getClass().getName() + " with " + desc, all);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static <T> void testInsertion(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
check("map empty", (map.size() == 0) && map.isEmpty());
|
||||
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
check(String.format("insertion: map expected size m%d != i%d", map.size(), i),
|
||||
map.size() == i);
|
||||
check(String.format("insertion: put(%s[%d])", keys_desc, i), null == map.put(keys[i], keys[i]));
|
||||
check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
check(String.format("insertion: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
|
||||
}
|
||||
|
||||
check(String.format("map expected size m%d != k%d", map.size(), keys.length),
|
||||
map.size() == keys.length);
|
||||
}
|
||||
|
||||
|
||||
private static <T> void testInPlaceOps(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
System.out.println(map.getClass() + " : " + keys_desc + ", testInPlaceOps");
|
||||
System.out.flush();
|
||||
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testPutIfAbsent(map, keys_desc, keys);
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testRemoveMapping(map, keys_desc, keys);
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testReplaceOldValue(map, keys_desc, keys);
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testReplaceIfMapped(map, keys_desc, keys);
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testComputeIfAbsent(map, keys_desc, keys, (k) -> getExtraVal(keys[0]));
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testComputeIfAbsent(map, keys_desc, keys, (k) -> null);
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testComputeIfPresent(map, keys_desc, keys, (k, v) -> getExtraVal(keys[0]));
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testComputeIfPresent(map, keys_desc, keys, (k, v) -> null);
|
||||
|
||||
if (!keys_desc.contains("Strings")) { // avoid parseInt() number format error
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testComputeNonNull(map, keys_desc, keys);
|
||||
}
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testComputeNull(map, keys_desc, keys);
|
||||
|
||||
if (!keys_desc.contains("Strings")) { // avoid parseInt() number format error
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testMergeNonNull(map, keys_desc, keys);
|
||||
}
|
||||
|
||||
map.clear();
|
||||
testInsertion(map, keys_desc, keys);
|
||||
testMergeNull(map, keys_desc, keys);
|
||||
}
|
||||
|
||||
|
||||
|
||||
private static <T> void testPutIfAbsent(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
T retVal;
|
||||
removeOddKeys(map, keys);
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
retVal = map.putIfAbsent(keys[i], extraVal);
|
||||
if (i % 2 == 0) { // even: not absent, not put
|
||||
check(String.format("putIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == keys[i]);
|
||||
check(String.format("putIfAbsent: get(%s[%d])", keys_desc, i), keys[i] == map.get(keys[i]));
|
||||
check(String.format("putIfAbsent: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
|
||||
} else { // odd: absent, was put
|
||||
check(String.format("putIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == null);
|
||||
check(String.format("putIfAbsent: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
|
||||
check(String.format("putIfAbsent: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
|
||||
}
|
||||
check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
}
|
||||
check(String.format("map expected size m%d != k%d", map.size(), keys.length),
|
||||
map.size() == keys.length);
|
||||
}
|
||||
|
||||
private static <T> void testRemoveMapping(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
boolean removed;
|
||||
int removes = 0;
|
||||
remapOddKeys(map, keys);
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
removed = map.remove(keys[i], keys[i]);
|
||||
if (i % 2 == 0) { // even: original mapping, should be removed
|
||||
check(String.format("removeMapping: retVal(%s[%d])", keys_desc, i), removed);
|
||||
check(String.format("removeMapping: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
|
||||
check(String.format("removeMapping: !containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
|
||||
check(String.format("removeMapping: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
|
||||
removes++;
|
||||
} else { // odd: new mapping, not removed
|
||||
check(String.format("removeMapping: retVal(%s[%d])", keys_desc, i), !removed);
|
||||
check(String.format("removeMapping: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
|
||||
check(String.format("removeMapping: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
check(String.format("removeMapping: containsValue(%s[%d])", keys_desc, i), map.containsValue(extraVal));
|
||||
}
|
||||
}
|
||||
check(String.format("map expected size m%d != k%d", map.size(), keys.length - removes),
|
||||
map.size() == keys.length - removes);
|
||||
}
|
||||
|
||||
private static <T> void testReplaceOldValue(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
// remap odds to extraVal
|
||||
// call replace to replace for extraVal, for all keys
|
||||
// check that all keys map to value from keys array
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
boolean replaced;
|
||||
remapOddKeys(map, keys);
|
||||
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
replaced = map.replace(keys[i], extraVal, keys[i]);
|
||||
if (i % 2 == 0) { // even: original mapping, should not be replaced
|
||||
check(String.format("replaceOldValue: retVal(%s[%d])", keys_desc, i), !replaced);
|
||||
} else { // odd: new mapping, should be replaced
|
||||
check(String.format("replaceOldValue: get(%s[%d])", keys_desc, i), replaced);
|
||||
}
|
||||
check(String.format("replaceOldValue: get(%s[%d])", keys_desc, i), keys[i] == map.get(keys[i]));
|
||||
check(String.format("replaceOldValue: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
check(String.format("replaceOldValue: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
|
||||
// removes++;
|
||||
}
|
||||
check(String.format("replaceOldValue: !containsValue(%s[%s])", keys_desc, extraVal.toString()), !map.containsValue(extraVal));
|
||||
check(String.format("map expected size m%d != k%d", map.size(), keys.length),
|
||||
map.size() == keys.length);
|
||||
}
|
||||
|
||||
// TODO: Test case for key mapped to null value
|
||||
private static <T> void testReplaceIfMapped(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
// remove odd keys
|
||||
// call replace for all keys[]
|
||||
// odd keys should remain absent, even keys should be mapped to EXTRA, no value from keys[] should be in map
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
int expectedSize1 = 0;
|
||||
removeOddKeys(map, keys);
|
||||
int expectedSize2 = map.size();
|
||||
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
T retVal = map.replace(keys[i], extraVal);
|
||||
if (i % 2 == 0) { // even: still in map, should be replaced
|
||||
check(String.format("replaceIfMapped: retVal(%s[%d])", keys_desc, i), retVal == keys[i]);
|
||||
check(String.format("replaceIfMapped: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
|
||||
check(String.format("replaceIfMapped: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
expectedSize1++;
|
||||
} else { // odd: was removed, should not be replaced
|
||||
check(String.format("replaceIfMapped: retVal(%s[%d])", keys_desc, i), retVal == null);
|
||||
check(String.format("replaceIfMapped: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
|
||||
check(String.format("replaceIfMapped: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
|
||||
}
|
||||
check(String.format("replaceIfMapped: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
|
||||
}
|
||||
check(String.format("replaceIfMapped: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
|
||||
check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize1),
|
||||
map.size() == expectedSize1);
|
||||
check(String.format("map expected size#2 m%d != k%d", map.size(), expectedSize2),
|
||||
map.size() == expectedSize2);
|
||||
|
||||
}
|
||||
|
||||
private static <T> void testComputeIfAbsent(Map<T, T> map, String keys_desc, T[] keys,
|
||||
Function<T,T> mappingFunction) {
|
||||
// remove a third of the keys
|
||||
// call computeIfAbsent for all keys, func returns EXTRA
|
||||
// check that removed keys now -> EXTRA, other keys -> original val
|
||||
T expectedVal = mappingFunction.apply(keys[0]);
|
||||
T retVal;
|
||||
int expectedSize = 0;
|
||||
removeThirdKeys(map, keys);
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
retVal = map.computeIfAbsent(keys[i], mappingFunction);
|
||||
if (i % 3 != 2) { // key present, not computed
|
||||
check(String.format("computeIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == keys[i]);
|
||||
check(String.format("computeIfAbsent: get(%s[%d])", keys_desc, i), keys[i] == map.get(keys[i]));
|
||||
check(String.format("computeIfAbsent: containsValue(%s[%d])", keys_desc, i), map.containsValue(keys[i]));
|
||||
check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
expectedSize++;
|
||||
} else { // key absent, computed unless function return null
|
||||
check(String.format("computeIfAbsent: (%s[%d]) retVal", keys_desc, i), retVal == expectedVal);
|
||||
check(String.format("computeIfAbsent: get(%s[%d])", keys_desc, i), expectedVal == map.get(keys[i]));
|
||||
check(String.format("computeIfAbsent: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
|
||||
// mapping should not be added if function returns null
|
||||
check(String.format("insertion: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]) != (expectedVal == null));
|
||||
if (expectedVal != null) { expectedSize++; }
|
||||
}
|
||||
}
|
||||
if (expectedVal != null) {
|
||||
check(String.format("computeIfAbsent: containsValue(%s[%s])", keys_desc, expectedVal), map.containsValue(expectedVal));
|
||||
}
|
||||
check(String.format("map expected size m%d != k%d", map.size(), expectedSize),
|
||||
map.size() == expectedSize);
|
||||
}
|
||||
|
||||
private static <T> void testComputeIfPresent(Map<T, T> map, String keys_desc, T[] keys,
|
||||
BiFunction<T,T,T> mappingFunction) {
|
||||
// remove a third of the keys
|
||||
// call testComputeIfPresent for all keys[]
|
||||
// removed keys should remain absent, even keys should be mapped to $RESULT
|
||||
// no value from keys[] should be in map
|
||||
T funcResult = mappingFunction.apply(keys[0], keys[0]);
|
||||
int expectedSize1 = 0;
|
||||
removeThirdKeys(map, keys);
|
||||
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
T retVal = map.computeIfPresent(keys[i], mappingFunction);
|
||||
if (i % 3 != 2) { // key present
|
||||
if (funcResult == null) { // was removed
|
||||
check(String.format("replaceIfMapped: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
|
||||
} else { // value was replaced
|
||||
check(String.format("replaceIfMapped: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
expectedSize1++;
|
||||
}
|
||||
check(String.format("computeIfPresent: retVal(%s[%s])", keys_desc, i), retVal == funcResult);
|
||||
check(String.format("replaceIfMapped: get(%s[%d])", keys_desc, i), funcResult == map.get(keys[i]));
|
||||
|
||||
} else { // odd: was removed, should not be replaced
|
||||
check(String.format("replaceIfMapped: retVal(%s[%d])", keys_desc, i), retVal == null);
|
||||
check(String.format("replaceIfMapped: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
|
||||
check(String.format("replaceIfMapped: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
|
||||
}
|
||||
check(String.format("replaceIfMapped: !containsValue(%s[%d])", keys_desc, i), !map.containsValue(keys[i]));
|
||||
}
|
||||
check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize1),
|
||||
map.size() == expectedSize1);
|
||||
}
|
||||
|
||||
private static <T> void testComputeNonNull(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
// remove a third of the keys
|
||||
// call compute() for all keys[]
|
||||
// all keys should be present: removed keys -> EXTRA, others to k-1
|
||||
BiFunction<T,T,T> mappingFunction = (k, v) -> {
|
||||
if (v == null) {
|
||||
return getExtraVal(keys[0]);
|
||||
} else {
|
||||
return keys[Integer.parseInt(k.toString()) - 1];
|
||||
}
|
||||
};
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
removeThirdKeys(map, keys);
|
||||
for (int i = 1; i < keys.length; i++) {
|
||||
T retVal = map.compute(keys[i], mappingFunction);
|
||||
if (i % 3 != 2) { // key present, should be mapped to k-1
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == keys[i-1]);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), keys[i-1] == map.get(keys[i]));
|
||||
} else { // odd: was removed, should be replaced with EXTRA
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
|
||||
}
|
||||
check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
}
|
||||
check(String.format("map expected size#1 m%d != k%d", map.size(), keys.length),
|
||||
map.size() == keys.length);
|
||||
check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
|
||||
check(String.format("compute: !containsValue(%s,[null])", keys_desc), !map.containsValue(null));
|
||||
}
|
||||
|
||||
private static <T> void testComputeNull(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
// remove a third of the keys
|
||||
// call compute() for all keys[]
|
||||
// removed keys should -> EXTRA
|
||||
// for other keys: func returns null, should have no mapping
|
||||
BiFunction<T,T,T> mappingFunction = (k, v) -> {
|
||||
// if absent/null -> EXTRA
|
||||
// if present -> null
|
||||
if (v == null) {
|
||||
return getExtraVal(keys[0]);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
int expectedSize = 0;
|
||||
removeThirdKeys(map, keys);
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
T retVal = map.compute(keys[i], mappingFunction);
|
||||
if (i % 3 != 2) { // key present, func returned null, should be absent from map
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == null);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
|
||||
check(String.format("compute: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
|
||||
check(String.format("compute: containsValue(%s[%s])", keys_desc, i), !map.containsValue(keys[i]));
|
||||
} else { // odd: was removed, should now be mapped to EXTRA
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
|
||||
check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
expectedSize++;
|
||||
}
|
||||
}
|
||||
check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
|
||||
check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize),
|
||||
map.size() == expectedSize);
|
||||
}
|
||||
|
||||
private static <T> void testMergeNonNull(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
// remove a third of the keys
|
||||
// call merge() for all keys[]
|
||||
// all keys should be present: removed keys now -> EXTRA, other keys -> k-1
|
||||
|
||||
// Map to preceding key
|
||||
BiFunction<T,T,T> mappingFunction = (k, v) -> keys[Integer.parseInt(k.toString()) - 1];
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
removeThirdKeys(map, keys);
|
||||
for (int i = 1; i < keys.length; i++) {
|
||||
T retVal = map.merge(keys[i], extraVal, mappingFunction);
|
||||
if (i % 3 != 2) { // key present, should be mapped to k-1
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == keys[i-1]);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), keys[i-1] == map.get(keys[i]));
|
||||
} else { // odd: was removed, should be replaced with EXTRA
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
|
||||
}
|
||||
check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
}
|
||||
|
||||
check(String.format("map expected size#1 m%d != k%d", map.size(), keys.length),
|
||||
map.size() == keys.length);
|
||||
check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
|
||||
check(String.format("compute: !containsValue(%s,[null])", keys_desc), !map.containsValue(null));
|
||||
|
||||
}
|
||||
|
||||
private static <T> void testMergeNull(Map<T, T> map, String keys_desc, T[] keys) {
|
||||
// remove a third of the keys
|
||||
// call merge() for all keys[]
|
||||
// result: removed keys -> EXTRA, other keys absent
|
||||
|
||||
BiFunction<T,T,T> mappingFunction = (k, v) -> null;
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
int expectedSize = 0;
|
||||
removeThirdKeys(map, keys);
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
T retVal = map.merge(keys[i], extraVal, mappingFunction);
|
||||
if (i % 3 != 2) { // key present, func returned null, should be absent from map
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == null);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), null == map.get(keys[i]));
|
||||
check(String.format("compute: containsKey(%s[%d])", keys_desc, i), !map.containsKey(keys[i]));
|
||||
} else { // odd: was removed, should now be mapped to EXTRA
|
||||
check(String.format("compute: retVal(%s[%d])", keys_desc, i), retVal == extraVal);
|
||||
check(String.format("compute: get(%s[%d])", keys_desc, i), extraVal == map.get(keys[i]));
|
||||
check(String.format("compute: containsKey(%s[%d])", keys_desc, i), map.containsKey(keys[i]));
|
||||
expectedSize++;
|
||||
}
|
||||
check(String.format("compute: containsValue(%s[%s])", keys_desc, i), !map.containsValue(keys[i]));
|
||||
}
|
||||
check(String.format("compute: containsValue(%s[%s])", keys_desc, extraVal.toString()), map.containsValue(extraVal));
|
||||
check(String.format("map expected size#1 m%d != k%d", map.size(), expectedSize),
|
||||
map.size() == expectedSize);
|
||||
}
|
||||
|
||||
/*
|
||||
* Return the EXTRA val for the key type being used
|
||||
*/
|
||||
private static <T> T getExtraVal(T key) {
|
||||
if (key instanceof HashableInteger) {
|
||||
return (T)EXTRA_INT_VAL;
|
||||
} else {
|
||||
return (T)EXTRA_STRING_VAL;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Remove half of the keys
|
||||
*/
|
||||
private static <T> void removeOddKeys(Map<T, T> map, /*String keys_desc, */ T[] keys) {
|
||||
int removes = 0;
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (i % 2 != 0) {
|
||||
map.remove(keys[i]);
|
||||
removes++;
|
||||
}
|
||||
}
|
||||
check(String.format("map expected size m%d != k%d", map.size(), keys.length - removes),
|
||||
map.size() == keys.length - removes);
|
||||
}
|
||||
|
||||
/*
|
||||
* Remove every third key
|
||||
* This will hopefully leave some removed keys in TreeBins for, e.g., computeIfAbsent
|
||||
* w/ a func that returns null.
|
||||
*
|
||||
* TODO: consider using this in other tests (and maybe adding a remapThirdKeys)
|
||||
*/
|
||||
private static <T> void removeThirdKeys(Map<T, T> map, /*String keys_desc, */ T[] keys) {
|
||||
int removes = 0;
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (i % 3 == 2) {
|
||||
map.remove(keys[i]);
|
||||
removes++;
|
||||
}
|
||||
}
|
||||
check(String.format("map expected size m%d != k%d", map.size(), keys.length - removes),
|
||||
map.size() == keys.length - removes);
|
||||
}
|
||||
|
||||
/*
|
||||
* Re-map the odd-numbered keys to map to the EXTRA value
|
||||
*/
|
||||
private static <T> void remapOddKeys(Map<T, T> map, /*String keys_desc, */ T[] keys) {
|
||||
T extraVal = getExtraVal(keys[0]);
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (i % 2 != 0) {
|
||||
map.put(keys[i], extraVal);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//--------------------- Infrastructure ---------------------------
|
||||
static volatile int passed = 0, failed = 0;
|
||||
|
||||
static void pass() {
|
||||
passed++;
|
||||
}
|
||||
|
||||
static void fail() {
|
||||
failed++;
|
||||
(new Error("Failure")).printStackTrace(System.err);
|
||||
}
|
||||
|
||||
static void fail(String msg) {
|
||||
failed++;
|
||||
(new Error("Failure: " + msg)).printStackTrace(System.err);
|
||||
}
|
||||
|
||||
static void abort() {
|
||||
fail();
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
static void abort(String msg) {
|
||||
fail(msg);
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
static void unexpected(String msg, Throwable t) {
|
||||
System.err.println("Unexpected: " + msg);
|
||||
unexpected(t);
|
||||
}
|
||||
|
||||
static void unexpected(Throwable t) {
|
||||
failed++;
|
||||
t.printStackTrace(System.err);
|
||||
}
|
||||
|
||||
static void check(boolean cond) {
|
||||
if (cond) {
|
||||
pass();
|
||||
} else {
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
static void check(String desc, boolean cond) {
|
||||
if (cond) {
|
||||
pass();
|
||||
} else {
|
||||
fail(desc);
|
||||
}
|
||||
}
|
||||
|
||||
static void equal(Object x, Object y) {
|
||||
if (Objects.equals(x, y)) {
|
||||
pass();
|
||||
} else {
|
||||
fail(x + " not equal to " + y);
|
||||
}
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Throwable {
|
||||
Thread.currentThread().setName(Collisions.class.getName());
|
||||
// Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
|
||||
try {
|
||||
realMain(args);
|
||||
} catch (Throwable t) {
|
||||
unexpected(t);
|
||||
}
|
||||
|
||||
System.out.printf("%nPassed = %d, failed = %d%n%n", passed, failed);
|
||||
if (failed > 0) {
|
||||
throw new Error("Some tests failed");
|
||||
}
|
||||
}
|
||||
}
|
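The InPlaceOpsCollisions test above drives the in-place Map operations (putIfAbsent, replace, computeIfAbsent, computeIfPresent, compute, merge) against heavily colliding keys. For reference, a compact illustration of the semantics it verifies — a null function result removes the mapping, a non-null result replaces or creates it — on an ordinary HashMap:

import java.util.HashMap;
import java.util.Map;

public class InPlaceOpsDemo {
    public static void main(String[] args) {
        Map<String, Integer> m = new HashMap<>();
        m.put("a", 1);

        m.merge("a", 10, Integer::sum);          // key present: 1 + 10 -> 11
        m.merge("b", 10, Integer::sum);          // key absent: inserted as 10
        m.compute("a", (k, v) -> null);          // null result removes "a"
        m.computeIfAbsent("c", k -> 3);          // key absent: inserted as 3

        System.out.println(m);                   // {b=10, c=3}
    }
}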
jdk/test/java/util/Map/TreeBinSplitBackToEntries.java (new file, 255 lines)
@@ -0,0 +1,255 @@
|
||||
/*
|
||||
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
import java.util.*;
|
||||
import java.lang.reflect.Field;
|
||||
|
||||
/*
|
||||
* @test
|
||||
* @bug 8005698
|
||||
* @summary Test the case where TreeBin.splitTreeBin() converts a bin back to an Entry list
|
||||
* @run main TreeBinSplitBackToEntries unused
|
||||
* @author Brent Christian
|
||||
*/
|
||||
|
||||
public class TreeBinSplitBackToEntries {
|
||||
private static int EXPECTED_TREE_THRESHOLD = 16;
|
||||
|
||||
// Easiest if this covers one bit higher then 'bit' in splitTreeBin() on the
|
||||
// call where the TreeBin is converted back to an Entry list
|
||||
private static int HASHMASK = 0x7F;
|
||||
private static boolean verbose = false;
|
||||
private static boolean fastFail = false;
|
||||
private static boolean failed = false;
|
||||
|
||||
static void printlnIfVerbose(String msg) {
|
||||
if (verbose) {System.out.println(msg); }
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
for (String arg : args) {
|
||||
switch(arg) {
|
||||
case "-verbose":
|
||||
verbose = true;
|
||||
break;
|
||||
case "-fastfail":
|
||||
fastFail = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
checkTreeThreshold();
|
||||
testMapHiTree();
|
||||
testMapLoTree();
|
||||
if (failed) {
|
||||
System.out.println("Test Failed");
|
||||
System.exit(1);
|
||||
} else {
|
||||
System.out.println("Test Passed");
|
||||
}
|
||||
}
|
||||
|
||||
public static void checkTreeThreshold() {
|
||||
int threshold = -1;
|
||||
try {
|
||||
Class treeBinClass = Class.forName("java.util.HashMap$TreeBin");
|
||||
Field treeThreshold = treeBinClass.getDeclaredField("TREE_THRESHOLD");
|
||||
treeThreshold.setAccessible(true);
|
||||
threshold = treeThreshold.getInt(treeBinClass);
|
||||
} catch (ClassNotFoundException|NoSuchFieldException|IllegalAccessException e) {
|
||||
e.printStackTrace();
|
||||
throw new Error("Problem accessing TreeBin.TREE_THRESHOLD", e);
|
||||
}
|
||||
check("Expected TREE_THRESHOLD: " + EXPECTED_TREE_THRESHOLD +", found: " + threshold,
|
||||
threshold == EXPECTED_TREE_THRESHOLD);
|
||||
printlnIfVerbose("TREE_THRESHOLD: " + threshold);
|
||||
}
|
||||
|
||||
public static void testMapHiTree() {
|
||||
Object[][] mapKeys = makeHiTreeTestData();
|
||||
testMapsForKeys(mapKeys, "hiTree");
|
||||
}
|
||||
|
||||
public static void testMapLoTree() {
|
||||
Object[][] mapKeys = makeLoTreeTestData();
|
||||
|
||||
testMapsForKeys(mapKeys, "loTree");
|
||||
}
|
||||
|
||||
public static void testMapsForKeys(Object[][] mapKeys, String desc) {
|
||||
// loop through data sets
|
||||
for (Object[] keys_desc : mapKeys) {
|
||||
Map<Object, Object>[] maps = (Map<Object, Object>[]) new Map[]{
|
||||
new HashMap<>(4, 0.8f),
|
||||
new LinkedHashMap<>(4, 0.8f),
|
||||
};
|
||||
// for each map type.
|
||||
for (Map<Object, Object> map : maps) {
|
||||
Object[] keys = (Object[]) keys_desc[1];
|
||||
System.out.println(desc + ": testPutThenGet() for " + map.getClass());
|
||||
testPutThenGet(map, keys);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static <T> void testPutThenGet(Map<T, T> map, T[] keys) {
|
||||
for (T key : keys) {
|
||||
printlnIfVerbose("put()ing 0x" + Integer.toHexString(Integer.parseInt(key.toString())) + ", hashCode=" + Integer.toHexString(key.hashCode()));
|
||||
map.put(key, key);
|
||||
}
|
||||
for (T key : keys) {
|
||||
check("key: 0x" + Integer.toHexString(Integer.parseInt(key.toString())) + " not found in resulting " + map.getClass().getSimpleName(), map.get(key) != null);
|
||||
}
|
||||
}
|
||||
|
||||
/* Data to force a non-empty loTree in TreeBin.splitTreeBin() to be converted back
|
||||
* into an Entry list
|
||||
*/
|
||||
private static Object[][] makeLoTreeTestData() {
|
||||
HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[] {
|
||||
new HashableInteger( 0x23, HASHMASK),
|
||||
new HashableInteger( 0x123, HASHMASK),
|
||||
new HashableInteger( 0x323, HASHMASK),
|
||||
new HashableInteger( 0x523, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x723, HASHMASK),
|
||||
new HashableInteger( 0x923, HASHMASK),
|
||||
new HashableInteger( 0xB23, HASHMASK),
|
||||
new HashableInteger( 0xD23, HASHMASK),
|
||||
|
||||
new HashableInteger( 0xF23, HASHMASK),
|
||||
new HashableInteger( 0xF123, HASHMASK),
|
||||
new HashableInteger( 0x1023, HASHMASK),
|
||||
new HashableInteger( 0x1123, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x1323, HASHMASK),
|
||||
new HashableInteger( 0x1523, HASHMASK),
|
||||
new HashableInteger( 0x1723, HASHMASK),
|
||||
new HashableInteger( 0x1923, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x1B23, HASHMASK),
|
||||
new HashableInteger( 0x1D23, HASHMASK),
|
||||
new HashableInteger( 0x3123, HASHMASK),
|
||||
new HashableInteger( 0x3323, HASHMASK),
|
||||
new HashableInteger( 0x3523, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x3723, HASHMASK),
|
||||
new HashableInteger( 0x1001, HASHMASK),
|
||||
new HashableInteger( 0x4001, HASHMASK),
|
||||
new HashableInteger( 0x1, HASHMASK),
|
||||
};
|
||||
return new Object[][] {
|
||||
new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
|
||||
};
|
||||
}
|
||||
|
||||
/* Data to force the hiTree in TreeBin.splitTreeBin() to be converted back
|
||||
* into an Entry list
|
||||
*/
|
||||
private static Object[][] makeHiTreeTestData() {
|
||||
HashableInteger COLLIDING_OBJECTS[] = new HashableInteger[] {
|
||||
new HashableInteger( 0x1, HASHMASK),
|
||||
new HashableInteger( 0x101, HASHMASK),
|
||||
new HashableInteger( 0x301, HASHMASK),
|
||||
new HashableInteger( 0x501, HASHMASK),
|
||||
new HashableInteger( 0x701, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x1001, HASHMASK),
|
||||
new HashableInteger( 0x1101, HASHMASK),
|
||||
new HashableInteger( 0x1301, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x1501, HASHMASK),
|
||||
new HashableInteger( 0x1701, HASHMASK),
|
||||
new HashableInteger( 0x4001, HASHMASK),
|
||||
new HashableInteger( 0x4101, HASHMASK),
|
||||
new HashableInteger( 0x4301, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x4501, HASHMASK),
|
||||
new HashableInteger( 0x4701, HASHMASK),
|
||||
new HashableInteger( 0x8001, HASHMASK),
|
||||
new HashableInteger( 0x8101, HASHMASK),
|
||||
|
||||
|
||||
new HashableInteger( 0x8301, HASHMASK),
|
||||
new HashableInteger( 0x8501, HASHMASK),
|
||||
new HashableInteger( 0x8701, HASHMASK),
|
||||
new HashableInteger( 0x9001, HASHMASK),
|
||||
|
||||
new HashableInteger( 0x23, HASHMASK),
|
||||
new HashableInteger( 0x123, HASHMASK),
|
||||
new HashableInteger( 0x323, HASHMASK),
|
||||
new HashableInteger( 0x523, HASHMASK),
|
||||
};
|
||||
return new Object[][] {
|
||||
new Object[]{"Colliding Objects", COLLIDING_OBJECTS},
|
||||
};
|
||||
}
|
||||
|
||||
static void check(String desc, boolean cond) {
|
||||
if (!cond) {
|
||||
fail(desc);
|
||||
}
|
||||
}
|
||||
|
||||
static void fail(String msg) {
|
||||
failed = true;
|
||||
(new Error("Failure: " + msg)).printStackTrace(System.err);
|
||||
if (fastFail) {
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
final static class HashableInteger implements Comparable<HashableInteger> {
|
||||
final int value;
|
||||
final int hashmask; //yes duplication
|
||||
|
||||
HashableInteger(int value, int hashmask) {
|
||||
this.value = value;
|
||||
this.hashmask = hashmask;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj instanceof HashableInteger) {
|
||||
HashableInteger other = (HashableInteger) obj;
|
||||
return other.value == value;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
// This version ANDs the mask
|
||||
return value & hashmask;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(HashableInteger o) {
|
||||
return value - o.value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Integer.toString(value);
|
||||
}
|
||||
}
|
||||
}
|
jdk/test/java/util/Spliterator/SpliteratorCollisions.java (new file, 707 lines)
@@ -0,0 +1,707 @@
|
||||
/*
|
||||
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
|
||||
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
||||
*
|
||||
* This code is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License version 2 only, as
|
||||
* published by the Free Software Foundation.
|
||||
*
|
||||
* This code is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* version 2 for more details (a copy is included in the LICENSE file that
|
||||
* accompanied this code).
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License version
|
||||
* 2 along with this work; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
*
|
||||
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
||||
* or visit www.oracle.com if you need additional information or have any
|
||||
* questions.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @test
|
||||
* @bug 8005698
|
||||
* @run testng SpliteratorCollisions
|
||||
* @summary Spliterator traversing and splitting hash maps containing colliding hashes
|
||||
* @author Brent Christian
|
||||
*/
|
||||
|
||||
import org.testng.annotations.DataProvider;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Deque;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Spliterator;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.LongConsumer;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.function.UnaryOperator;
|
||||
|
||||
import static org.testng.Assert.*;
|
||||
import static org.testng.Assert.assertEquals;
|
||||
|
||||
@Test
|
||||
public class SpliteratorCollisions {
|
||||
|
||||
private static List<Integer> SIZES = Arrays.asList(0, 1, 10, 100, 1000);
|
||||
|
||||
    private static class SpliteratorDataBuilder<T> {
        List<Object[]> data;
        List<T> exp;
        Map<T, T> mExp;

        SpliteratorDataBuilder(List<Object[]> data, List<T> exp) {
            this.data = data;
            this.exp = exp;
            this.mExp = createMap(exp);
        }

        Map<T, T> createMap(List<T> l) {
            Map<T, T> m = new LinkedHashMap<>();
            for (T t : l) {
                m.put(t, t);
            }
            return m;
        }

        void add(String description, Collection<?> expected, Supplier<Spliterator<?>> s) {
            description = joiner(description).toString();
            data.add(new Object[]{description, expected, s});
        }

        void add(String description, Supplier<Spliterator<?>> s) {
            add(description, exp, s);
        }

        void addCollection(Function<Collection<T>, ? extends Collection<T>> c) {
            add("new " + c.apply(Collections.<T>emptyList()).getClass().getName() + ".spliterator()",
                () -> c.apply(exp).spliterator());
        }

        void addList(Function<Collection<T>, ? extends List<T>> l) {
            // @@@ If collection is instance of List then add sub-list tests
            addCollection(l);
        }

        void addMap(Function<Map<T, T>, ? extends Map<T, T>> m) {
            String description = "new " + m.apply(Collections.<T, T>emptyMap()).getClass().getName();
            add(description + ".keySet().spliterator()", () -> m.apply(mExp).keySet().spliterator());
            add(description + ".values().spliterator()", () -> m.apply(mExp).values().spliterator());
            add(description + ".entrySet().spliterator()", mExp.entrySet(), () -> m.apply(mExp).entrySet().spliterator());
        }

        StringBuilder joiner(String description) {
            return new StringBuilder(description).
                    append(" {").
                    append("size=").append(exp.size()).
                    append("}");
        }
    }

    static Object[][] spliteratorDataProvider;

    @DataProvider(name = "HashableIntSpliterator")
    public static Object[][] spliteratorDataProvider() {
        if (spliteratorDataProvider != null) {
            return spliteratorDataProvider;
        }

        List<Object[]> data = new ArrayList<>();
        for (int size : SIZES) {
            List<HashableInteger> exp = listIntRange(size, false);
            SpliteratorDataBuilder<HashableInteger> db = new SpliteratorDataBuilder<>(data, exp);

            // Maps
            db.addMap(HashMap::new);
            db.addMap(LinkedHashMap::new);

            // Collections that use HashMap
            db.addCollection(HashSet::new);
            db.addCollection(LinkedHashSet::new);
            db.addCollection(TreeSet::new);
        }
        return spliteratorDataProvider = data.toArray(new Object[0][]);
    }

    static Object[][] spliteratorDataProviderWithNull;

    @DataProvider(name = "HashableIntSpliteratorWithNull")
    public static Object[][] spliteratorNullDataProvider() {
        if (spliteratorDataProviderWithNull != null) {
            return spliteratorDataProviderWithNull;
        }

        List<Object[]> data = new ArrayList<>();
        for (int size : SIZES) {
            List<HashableInteger> exp = listIntRange(size, true);
            SpliteratorDataBuilder<HashableInteger> db = new SpliteratorDataBuilder<>(data, exp);

            // Maps
            db.addMap(HashMap::new);
            db.addMap(LinkedHashMap::new);
            // TODO: add this back in if we decide to keep TreeBin in WeakHashMap
            //db.addMap(WeakHashMap::new);

            // Collections that use HashMap
            db.addCollection(HashSet::new);
            db.addCollection(LinkedHashSet::new);
            // db.addCollection(TreeSet::new);

        }
        return spliteratorDataProviderWithNull = data.toArray(new Object[0][]);
    }

    final static class HashableInteger implements Comparable<HashableInteger> {

        final int value;
        final int hashmask; //yes duplication

        HashableInteger(int value, int hashmask) {
            this.value = value;
            this.hashmask = hashmask;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj instanceof HashableInteger) {
                HashableInteger other = (HashableInteger) obj;

                return other.value == value;
            }

            return false;
        }

        @Override
        public int hashCode() {
            return value % hashmask;
        }

        @Override
        public int compareTo(HashableInteger o) {
            return value - o.value;
        }

        @Override
        public String toString() {
            return Integer.toString(value);
        }
    }

    private static List<HashableInteger> listIntRange(int upTo, boolean withNull) {
        List<HashableInteger> exp = new ArrayList<>();
        if (withNull) {
            exp.add(null);
        }
        for (int i = 0; i < upTo; i++) {
            exp.add(new HashableInteger(i, 10));
        }
        return Collections.unmodifiableList(exp);
    }

@Test(dataProvider = "HashableIntSpliterator")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testNullPointerException(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining(null));
|
||||
executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance(null));
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliteratorWithNull")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testNullPointerExceptionWithNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
executeAndCatch(NullPointerException.class, () -> s.get().forEachRemaining(null));
|
||||
executeAndCatch(NullPointerException.class, () -> s.get().tryAdvance(null));
|
||||
}
|
||||
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliterator")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testForEach(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testForEach(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliteratorWithNull")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testForEachWithNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testForEach(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliterator")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testTryAdvance(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testTryAdvance(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliteratorWithNull")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testTryAdvanceWithNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testTryAdvance(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
    /* skip this test until 8013649 is fixed
    @Test(dataProvider = "HashableIntSpliterator")
    @SuppressWarnings({"unchecked", "rawtypes"})
    public void testMixedTryAdvanceForEach(String description, Collection exp, Supplier<Spliterator> s) {
        testMixedTryAdvanceForEach(exp, s, (Consumer<Object> b) -> b);
    }

    @Test(dataProvider = "HashableIntSpliteratorWithNull")
    @SuppressWarnings({"unchecked", "rawtypes"})
    public void testMixedTryAdvanceForEachWithNull(String description, Collection exp, Supplier<Spliterator> s) {
        testMixedTryAdvanceForEach(exp, s, (Consumer<Object> b) -> b);
    }
    */

@Test(dataProvider = "HashableIntSpliterator")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitAfterFullTraversal(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitAfterFullTraversal(s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliteratorWithNull")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitAfterFullTraversalWithNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitAfterFullTraversal(s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliterator")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitOnce(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitOnce(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliteratorWithNull")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitOnceWithNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitOnce(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliterator")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitSixDeep(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitSixDeep(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliteratorWithNull")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitSixDeepWithNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitSixDeep(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliterator")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitUntilNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitUntilNull(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
@Test(dataProvider = "HashableIntSpliteratorWithNull")
|
||||
@SuppressWarnings({"unchecked", "rawtypes"})
|
||||
public void testSplitUntilNullWithNull(String description, Collection exp, Supplier<Spliterator> s) {
|
||||
testSplitUntilNull(exp, s, (Consumer<Object> b) -> b);
|
||||
}
|
||||
|
||||
    private static <T, S extends Spliterator<T>> void testForEach(
            Collection<T> exp,
            Supplier<S> supplier,
            UnaryOperator<Consumer<T>> boxingAdapter) {
        S spliterator = supplier.get();
        long sizeIfKnown = spliterator.getExactSizeIfKnown();
        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);

        ArrayList<T> fromForEach = new ArrayList<>();
        spliterator = supplier.get();
        Consumer<T> addToFromForEach = boxingAdapter.apply(fromForEach::add);
        spliterator.forEachRemaining(addToFromForEach);

        // Assert that forEach now produces no elements
        spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
        // Assert that tryAdvance now produces no elements
        spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));

        // assert that size, tryAdvance, and forEach are consistent
        if (sizeIfKnown >= 0) {
            assertEquals(sizeIfKnown, exp.size());
        }
        if (exp.contains(null)) {
            assertTrue(fromForEach.contains(null));
        }
        assertEquals(fromForEach.size(), exp.size());

        assertContents(fromForEach, exp, isOrdered);
    }

    private static <T, S extends Spliterator<T>> void testTryAdvance(
            Collection<T> exp,
            Supplier<S> supplier,
            UnaryOperator<Consumer<T>> boxingAdapter) {
        S spliterator = supplier.get();
        long sizeIfKnown = spliterator.getExactSizeIfKnown();
        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);

        spliterator = supplier.get();
        ArrayList<T> fromTryAdvance = new ArrayList<>();
        Consumer<T> addToFromTryAdvance = boxingAdapter.apply(fromTryAdvance::add);
        while (spliterator.tryAdvance(addToFromTryAdvance)) { }

        // Assert that forEach now produces no elements
        spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
        // Assert that tryAdvance now produces no elements
        spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));

        // assert that size, tryAdvance, and forEach are consistent
        if (sizeIfKnown >= 0) {
            assertEquals(sizeIfKnown, exp.size());
        }
        assertEquals(fromTryAdvance.size(), exp.size());

        assertContents(fromTryAdvance, exp, isOrdered);
    }

    private static <T, S extends Spliterator<T>> void testMixedTryAdvanceForEach(
            Collection<T> exp,
            Supplier<S> supplier,
            UnaryOperator<Consumer<T>> boxingAdapter) {
        S spliterator = supplier.get();
        long sizeIfKnown = spliterator.getExactSizeIfKnown();
        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);

        // tryAdvance first few elements, then forEach rest
        ArrayList<T> dest = new ArrayList<>();
        spliterator = supplier.get();
        Consumer<T> addToDest = boxingAdapter.apply(dest::add);
        for (int i = 0; i < 10 && spliterator.tryAdvance(addToDest); i++) { }
        spliterator.forEachRemaining(addToDest);

        // Assert that forEach now produces no elements
        spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));
        // Assert that tryAdvance now produces no elements
        spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));

        if (sizeIfKnown >= 0) {
            assertEquals(sizeIfKnown, dest.size());
        }
        assertEquals(dest.size(), exp.size());

        if (isOrdered) {
            assertEquals(dest, exp);
        }
        else {
            assertContentsUnordered(dest, exp);
        }
    }

    private static <T, S extends Spliterator<T>> void testSplitAfterFullTraversal(
            Supplier<S> supplier,
            UnaryOperator<Consumer<T>> boxingAdapter) {
        // Full traversal using tryAdvance
        Spliterator<T> spliterator = supplier.get();
        while (spliterator.tryAdvance(boxingAdapter.apply(e -> { }))) { }
        Spliterator<T> split = spliterator.trySplit();
        assertNull(split);

        // Full traversal using forEach
        spliterator = supplier.get();
        spliterator.forEachRemaining(boxingAdapter.apply(e -> { }));
        split = spliterator.trySplit();
        assertNull(split);

        // Full traversal using tryAdvance then forEach
        spliterator = supplier.get();
        spliterator.tryAdvance(boxingAdapter.apply(e -> { }));
        spliterator.forEachRemaining(boxingAdapter.apply(e -> { }));
        split = spliterator.trySplit();
        assertNull(split);
    }

    private static <T, S extends Spliterator<T>> void testSplitOnce(
            Collection<T> exp,
            Supplier<S> supplier,
            UnaryOperator<Consumer<T>> boxingAdapter) {
        S spliterator = supplier.get();
        long sizeIfKnown = spliterator.getExactSizeIfKnown();
        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);

        ArrayList<T> fromSplit = new ArrayList<>();
        Spliterator<T> s1 = supplier.get();
        Spliterator<T> s2 = s1.trySplit();
        long s1Size = s1.getExactSizeIfKnown();
        long s2Size = (s2 != null) ? s2.getExactSizeIfKnown() : 0;

        Consumer<T> addToFromSplit = boxingAdapter.apply(fromSplit::add);
        if (s2 != null)
            s2.forEachRemaining(addToFromSplit);
        s1.forEachRemaining(addToFromSplit);

        if (sizeIfKnown >= 0) {
            assertEquals(sizeIfKnown, fromSplit.size());
            if (s1Size >= 0 && s2Size >= 0)
                assertEquals(sizeIfKnown, s1Size + s2Size);
        }
        assertContents(fromSplit, exp, isOrdered);
    }

    private static <T, S extends Spliterator<T>> void testSplitSixDeep(
            Collection<T> exp,
            Supplier<S> supplier,
            UnaryOperator<Consumer<T>> boxingAdapter) {
        S spliterator = supplier.get();
        boolean isOrdered = spliterator.hasCharacteristics(Spliterator.ORDERED);

        for (int depth = 0; depth < 6; depth++) {
            List<T> dest = new ArrayList<>();
            spliterator = supplier.get();

            assertSpliterator(spliterator);

            // verify splitting with forEach
            visit(depth, 0, dest, spliterator, boxingAdapter, spliterator.characteristics(), false);
            assertContents(dest, exp, isOrdered);

            // verify splitting with tryAdvance
            dest.clear();
            spliterator = supplier.get();
            visit(depth, 0, dest, spliterator, boxingAdapter, spliterator.characteristics(), true);
            assertContents(dest, exp, isOrdered);
        }
    }

    private static <T, S extends Spliterator<T>> void visit(int depth, int curLevel,
            List<T> dest, S spliterator, UnaryOperator<Consumer<T>> boxingAdapter,
            int rootCharacteristics, boolean useTryAdvance) {
        if (curLevel < depth) {
            long beforeSize = spliterator.getExactSizeIfKnown();
            Spliterator<T> split = spliterator.trySplit();
            if (split != null) {
                assertSpliterator(split, rootCharacteristics);
                assertSpliterator(spliterator, rootCharacteristics);

                if ((rootCharacteristics & Spliterator.SUBSIZED) != 0 &&
                    (rootCharacteristics & Spliterator.SIZED) != 0) {
                    assertEquals(beforeSize, split.estimateSize() + spliterator.estimateSize());
                }
                visit(depth, curLevel + 1, dest, split, boxingAdapter, rootCharacteristics, useTryAdvance);
            }
            visit(depth, curLevel + 1, dest, spliterator, boxingAdapter, rootCharacteristics, useTryAdvance);
        }
        else {
            long sizeIfKnown = spliterator.getExactSizeIfKnown();
            if (useTryAdvance) {
                Consumer<T> addToDest = boxingAdapter.apply(dest::add);
                int count = 0;
                while (spliterator.tryAdvance(addToDest)) {
                    ++count;
                }

                if (sizeIfKnown >= 0)
                    assertEquals(sizeIfKnown, count);

                // Assert that forEach now produces no elements
                spliterator.forEachRemaining(boxingAdapter.apply(e -> fail("Spliterator.forEach produced an element after spliterator exhausted: " + e)));

                Spliterator<T> split = spliterator.trySplit();
                assertNull(split);
            }
            else {
                List<T> leafDest = new ArrayList<>();
                Consumer<T> addToLeafDest = boxingAdapter.apply(leafDest::add);
                spliterator.forEachRemaining(addToLeafDest);

                if (sizeIfKnown >= 0)
                    assertEquals(sizeIfKnown, leafDest.size());

                // Assert that tryAdvance now produces no elements
                spliterator.tryAdvance(boxingAdapter.apply(e -> fail("Spliterator.tryAdvance produced an element after spliterator exhausted: " + e)));

                Spliterator<T> split = spliterator.trySplit();
                assertNull(split);

                dest.addAll(leafDest);
            }
        }
    }

    private static <T, S extends Spliterator<T>> void testSplitUntilNull(
            Collection<T> exp,
            Supplier<S> supplier,
            UnaryOperator<Consumer<T>> boxingAdapter) {
        Spliterator<T> s = supplier.get();
        boolean isOrdered = s.hasCharacteristics(Spliterator.ORDERED);
        assertSpliterator(s);

        List<T> splits = new ArrayList<>();
        Consumer<T> c = boxingAdapter.apply(splits::add);

        testSplitUntilNull(new SplitNode<T>(c, s));
        assertContents(splits, exp, isOrdered);
    }

    private static class SplitNode<T> {
        // Constant for every node
        final Consumer<T> c;
        final int rootCharacteristics;

        final Spliterator<T> s;

        SplitNode(Consumer<T> c, Spliterator<T> s) {
            this(c, s.characteristics(), s);
        }

        private SplitNode(Consumer<T> c, int rootCharacteristics, Spliterator<T> s) {
            this.c = c;
            this.rootCharacteristics = rootCharacteristics;
            this.s = s;
        }

        SplitNode<T> fromSplit(Spliterator<T> split) {
            return new SplitNode<>(c, rootCharacteristics, split);
        }
    }

    /**
     * Set the maximum stack capacity to 0.25MB. This should be more than enough to detect a bad spliterator
     * while not unduly disrupting test infrastructure, given that the test data sizes in use are small.
     * Note that j.u.c.ForkJoinPool sets the max queue size to 64M (1 << 26).
     */
    private static final int MAXIMUM_STACK_CAPACITY = 1 << 18; // 0.25MB

    private static <T> void testSplitUntilNull(SplitNode<T> e) {
        // Use an explicit stack to avoid a StackOverflowException when testing a Spliterator
        // that when repeatedly split produces a right-balanced (and maybe degenerate) tree, or
        // for a spliterator that is badly behaved.
        Deque<SplitNode<T>> stack = new ArrayDeque<>();
        stack.push(e);

        int iteration = 0;
        while (!stack.isEmpty()) {
            assertTrue(iteration++ < MAXIMUM_STACK_CAPACITY, "Exceeded maximum stack modification count of 1 << 18");

            e = stack.pop();
            Spliterator<T> parentAndRightSplit = e.s;

            long parentEstimateSize = parentAndRightSplit.estimateSize();
            assertTrue(parentEstimateSize >= 0,
                       String.format("Split size estimate %d < 0", parentEstimateSize));

            long parentSize = parentAndRightSplit.getExactSizeIfKnown();
            Spliterator<T> leftSplit = parentAndRightSplit.trySplit();
            if (leftSplit == null) {
                parentAndRightSplit.forEachRemaining(e.c);
                continue;
            }

            assertSpliterator(leftSplit, e.rootCharacteristics);
            assertSpliterator(parentAndRightSplit, e.rootCharacteristics);

            if (parentEstimateSize != Long.MAX_VALUE && leftSplit.estimateSize() > 0 && parentAndRightSplit.estimateSize() > 0) {
                assertTrue(leftSplit.estimateSize() < parentEstimateSize,
                           String.format("Left split size estimate %d >= parent split size estimate %d", leftSplit.estimateSize(), parentEstimateSize));
                assertTrue(parentAndRightSplit.estimateSize() < parentEstimateSize,
                           String.format("Right split size estimate %d >= parent split size estimate %d", parentAndRightSplit.estimateSize(), parentEstimateSize));
            }
            else {
                assertTrue(leftSplit.estimateSize() <= parentEstimateSize,
                           String.format("Left split size estimate %d > parent split size estimate %d", leftSplit.estimateSize(), parentEstimateSize));
                assertTrue(parentAndRightSplit.estimateSize() <= parentEstimateSize,
                           String.format("Right split size estimate %d > parent split size estimate %d", parentAndRightSplit.estimateSize(), parentEstimateSize));
            }

            long leftSize = leftSplit.getExactSizeIfKnown();
            long rightSize = parentAndRightSplit.getExactSizeIfKnown();
            if (parentSize >= 0 && leftSize >= 0 && rightSize >= 0)
                assertEquals(parentSize, leftSize + rightSize,
                             String.format("exact left split size %d + exact right split size %d != parent exact split size %d",
                                           leftSize, rightSize, parentSize));

            // Add right side to stack first so left side is popped off first
            stack.push(e.fromSplit(parentAndRightSplit));
            stack.push(e.fromSplit(leftSplit));
        }
    }

    private static void assertSpliterator(Spliterator<?> s, int rootCharacteristics) {
        if ((rootCharacteristics & Spliterator.SUBSIZED) != 0) {
            assertTrue(s.hasCharacteristics(Spliterator.SUBSIZED),
                       "Child split is not SUBSIZED when root split is SUBSIZED");
        }
        assertSpliterator(s);
    }

    private static void assertSpliterator(Spliterator<?> s) {
        if (s.hasCharacteristics(Spliterator.SUBSIZED)) {
            assertTrue(s.hasCharacteristics(Spliterator.SIZED));
        }
        if (s.hasCharacteristics(Spliterator.SIZED)) {
            assertTrue(s.estimateSize() != Long.MAX_VALUE);
            assertTrue(s.getExactSizeIfKnown() >= 0);
        }
        try {
            s.getComparator();
            assertTrue(s.hasCharacteristics(Spliterator.SORTED));
        } catch (IllegalStateException e) {
            assertFalse(s.hasCharacteristics(Spliterator.SORTED));
        }
    }

    private static <T> void assertContents(Collection<T> actual, Collection<T> expected, boolean isOrdered) {
        if (isOrdered) {
            assertEquals(actual, expected);
        }
        else {
            assertContentsUnordered(actual, expected);
        }
    }

    private static <T> void assertContentsUnordered(Iterable<T> actual, Iterable<T> expected) {
        assertEquals(toBoxedMultiset(actual), toBoxedMultiset(expected));
    }

    private static <T> Map<T, HashableInteger> toBoxedMultiset(Iterable<T> c) {
        Map<T, HashableInteger> result = new HashMap<>();
        c.forEach((Consumer) e -> {
            if (result.containsKey((T) e)) {
                result.put((T) e, new HashableInteger(((HashableInteger) result.get(e)).value + 1, 10));
            } else {
                result.put((T) e, new HashableInteger(1, 10));
            }
        });
        return result;
    }

    private void executeAndCatch(Class<? extends Exception> expected, Runnable r) {
        Exception caught = null;
        try {
            r.run();
        }
        catch (Exception e) {
            caught = e;
        }

        assertNotNull(caught,
                      String.format("No Exception was thrown, expected an Exception of %s to be thrown",
                                    expected.getName()));
        assertTrue(expected.isInstance(caught),
                   String.format("Exception thrown %s not an instance of %s",
                                 caught.getClass().getName(), expected.getName()));
    }

}
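Editor's note on the key type used above: HashableInteger.hashCode() returns value % hashmask, and listIntRange always constructs keys with hashmask 10, so every map or set built by the data providers funnels its entries into at most ten distinct hash values and forces the long collision chains that the new balanced-tree bins are meant to handle. A minimal standalone sketch of the same idea follows; CollisionSketch and CollidingKey are illustrative names and are not part of the test or the JDK sources.

import java.util.HashMap;
import java.util.Map;

public class CollisionSketch {
    // Illustrative key: hashCode() collapses to value % 10, like HashableInteger above.
    static final class CollidingKey {
        final int value;
        CollidingKey(int value) { this.value = value; }
        @Override public boolean equals(Object o) {
            return o instanceof CollidingKey && ((CollidingKey) o).value == value;
        }
        @Override public int hashCode() { return value % 10; }
    }

    public static void main(String[] args) {
        Map<CollidingKey, Integer> map = new HashMap<>();
        for (int i = 0; i < 1000; i++) {
            map.put(new CollidingKey(i), i);
        }
        // 1000 entries share only 10 distinct hash codes, so each bucket holds
        // roughly 100 colliding keys; lookups still resolve via equals().
        System.out.println(map.size() + " entries, value for key 42 = "
                + map.get(new CollidingKey(42)));
    }
}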
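The split checks in the test reduce to a simple pattern: split a spliterator, traverse both halves, and confirm that every element is seen exactly once. The small standalone sketch below shows that pattern outside the harness; SplitOnceSketch and countRemaining are illustrative names, and the real test feeds in the colliding keys described above rather than plain strings.

import java.util.HashSet;
import java.util.Set;
import java.util.Spliterator;

public class SplitOnceSketch {
    public static void main(String[] args) {
        Set<String> set = new HashSet<>();
        for (int i = 0; i < 1000; i++) {
            set.add("key-" + i);
        }

        // Split once, then traverse both halves, as testSplitOnce does.
        Spliterator<String> s1 = set.spliterator();
        Spliterator<String> s2 = s1.trySplit();      // may be null for tiny sources

        long count = 0;
        if (s2 != null) {
            count += countRemaining(s2);
        }
        count += countRemaining(s1);

        // Every element must be seen exactly once across the two halves.
        System.out.println("expected=" + set.size() + " traversed=" + count);
    }

    private static long countRemaining(Spliterator<String> s) {
        long[] n = {0};
        s.forEachRemaining(x -> n[0]++);
        return n[0];
    }
}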