8011199: Backout changeset JDK-7143928 (2b34a1eb3153)
Reviewed-by: darcy, dholmes
parent 12f1183c74
commit 7d45058eaa
@@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -104,11 +104,6 @@ public class ArrayList<E> extends AbstractList<E>
{
private static final long serialVersionUID = 8683452581122892189L;

/**
* Shared empty array instance used for empty instances.
*/
private static final Object EMPTY_ELEMENTDATA[] = new Object[0];

/**
* The array buffer into which the elements of the ArrayList are stored.
* The capacity of the ArrayList is the length of this array buffer.
@@ -141,8 +136,7 @@ public class ArrayList<E> extends AbstractList<E>
* Constructs an empty list with an initial capacity of ten.
*/
public ArrayList() {
super();
this.elementData = EMPTY_ELEMENTDATA;
this(10);
}

/**
@@ -168,7 +162,8 @@ public class ArrayList<E> extends AbstractList<E>
*/
public void trimToSize() {
modCount++;
if (size < elementData.length) {
int oldCapacity = elementData.length;
if (size < oldCapacity) {
elementData = Arrays.copyOf(elementData, size);
}
}
@@ -182,20 +177,11 @@ public class ArrayList<E> extends AbstractList<E>
*/
public void ensureCapacity(int minCapacity) {
if (minCapacity > 0)
ensureExplicitCapacity(minCapacity);
ensureCapacityInternal(minCapacity);
}

private void ensureCapacityInternal(int minCapacity) {
if(elementData == EMPTY_ELEMENTDATA) {
minCapacity = Math.max(10, minCapacity);
}

ensureExplicitCapacity(minCapacity);
}

private void ensureExplicitCapacity(int minCapacity) {
modCount++;

// overflow-conscious code
if (minCapacity - elementData.length > 0)
grow(minCapacity);
@@ -520,7 +506,8 @@ public class ArrayList<E> extends AbstractList<E>
modCount++;

// Let gc do its work
Arrays.fill(elementData, null);
for (int i = 0; i < size; i++)
elementData[i] = null;

size = 0;
}
@@ -601,8 +588,8 @@ public class ArrayList<E> extends AbstractList<E>

// Let gc do its work
int newSize = size - (toIndex-fromIndex);
Arrays.fill(elementData, newSize, size, null);
size = newSize;
while (size != newSize)
elementData[--size] = null;
}

/**
@@ -690,8 +677,8 @@ public class ArrayList<E> extends AbstractList<E>
w += size - r;
}
if (w != size) {
// Let gc do its work
Arrays.fill(elementData, w, size, null);
for (int i = w; i < size; i++)
elementData[i] = null;
modCount += size - w;
size = w;
modified = true;
@@ -715,7 +702,7 @@ public class ArrayList<E> extends AbstractList<E>
s.defaultWriteObject();

// Write out array length
s.writeInt((elementData == EMPTY_ELEMENTDATA) ? 10 : elementData.length);
s.writeInt(elementData.length);

// Write out all elements in the proper order.
for (int i=0; i<size; i++)
@@ -724,6 +711,7 @@ public class ArrayList<E> extends AbstractList<E>
if (modCount != expectedModCount) {
throw new ConcurrentModificationException();
}

}

/**
@@ -735,16 +723,10 @@ public class ArrayList<E> extends AbstractList<E>
// Read in size, and any hidden stuff
s.defaultReadObject();

// Read in array length
int initialCapacity = s.readInt();
elementData = EMPTY_ELEMENTDATA;
// Read in array length and allocate array
int arrayLength = s.readInt();
Object[] a = elementData = new Object[arrayLength];

if((size > 0) || (initialCapacity != 10)) {
// allocate array based upon size.
ensureCapacityInternal(size);
}

Object[] a = elementData;
// Read in all elements in the proper order.
for (int i=0; i<size; i++)
a[i] = s.readObject();
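
The ArrayList hunks above revert the JDK-7143928 lazy-allocation scheme: a shared zero-length EMPTY_ELEMENTDATA array stands in for the backing store of every empty list, and the real allocation is deferred until the first element arrives, while the backout restores the eager this(10)/new Object[] allocation. The following standalone sketch only illustrates that deferred-allocation idiom; the class name, the default capacity of 10, and the growth policy are illustrative assumptions, not the JDK implementation.

import java.util.Arrays;

// Sketch of the shared-empty-array idiom this changeset backs out (illustrative only).
public class LazyBackedList {
    private static final Object[] EMPTY = {}; // shared by every empty instance
    private Object[] data = EMPTY;            // no per-instance allocation until first add
    private int size;

    public void add(Object e) {
        if (data == EMPTY) {
            data = new Object[10];             // first add pays for the allocation
        } else if (size == data.length) {
            data = Arrays.copyOf(data, size + (size >> 1)); // grow by ~50%
        }
        data[size++] = e;
    }

    public int size() { return size; }
}
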
@@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -143,15 +143,10 @@ public class HashMap<K,V>
*/
static final float DEFAULT_LOAD_FACTOR = 0.75f;

/**
* An empty table instance to share when the table is not inflated.
*/
static final Entry<?,?>[] EMPTY_TABLE = {};

/**
* The table, resized as necessary. Length MUST Always be a power of two.
*/
transient Entry<?,?>[] table = EMPTY_TABLE;
transient Entry<?,?>[] table;

/**
* The number of key-value mappings contained in this map.
@@ -228,8 +223,14 @@ public class HashMap<K,V>
throw new IllegalArgumentException("Illegal load factor: " +
loadFactor);

// Find a power of 2 >= initialCapacity
int capacity = 1;
while (capacity < initialCapacity)
capacity <<= 1;

this.loadFactor = loadFactor;
threshold = initialCapacity;
threshold = (int)Math.min(capacity * loadFactor, MAXIMUM_CAPACITY + 1);
table = new Entry<?,?>[capacity];
init();
}

@@ -264,30 +265,9 @@ public class HashMap<K,V>
public HashMap(Map<? extends K, ? extends V> m) {
this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
inflateTable(threshold);

putAllForCreate(m);
}

private static int roundUpToPowerOf2(int number) {
int rounded = (rounded = Integer.highestOneBit(number)) != 0
? (Integer.bitCount(number) > 1) ? rounded << 1 : rounded
: 1;

return rounded;
}

/**
* Inflate the table
*/
final void inflateTable(int toSize) {
// Find a power of 2 >= initialCapacity
int capacity = roundUpToPowerOf2(toSize);

threshold = (int) Math.min(capacity * loadFactor, MAXIMUM_CAPACITY + 1);
table = new Entry[capacity];
}

// internal utilities

/**
@@ -325,7 +305,6 @@ public class HashMap<K,V>
* Returns index for hash code h.
*/
static int indexFor(int h, int length) {
if (Integer.bitCount(length) != 1) { throw new Error("Ya dun messed up good"); }
return h & (length-1);
}

@@ -390,10 +369,6 @@ public class HashMap<K,V>
*/
@SuppressWarnings("unchecked")
final Entry<K,V> getEntry(Object key) {
if (isEmpty()) {
return null;
}

int hash = (key == null) ? 0 : hash(key);
for (Entry<?,?> e = table[indexFor(hash, table.length)];
e != null;
@@ -406,6 +381,7 @@ public class HashMap<K,V>
return null;
}


/**
* Associates the specified value with the specified key in this map.
* If the map previously contained a mapping for the key, the old
@@ -419,9 +395,6 @@ public class HashMap<K,V>
* previously associated <tt>null</tt> with <tt>key</tt>.)
*/
public V put(K key, V value) {
if (table == EMPTY_TABLE) {
inflateTable(threshold);
}
if (key == null)
return putForNullKey(value);
int hash = hash(key);
@@ -556,10 +529,6 @@ public class HashMap<K,V>
if (numKeysToBeAdded == 0)
return;

if (table == EMPTY_TABLE) {
inflateTable(Math.max((int) (numKeysToBeAdded * loadFactor), threshold));
}

/*
* Expand the map if the map if the number of mappings to be added
* is greater than or equal to threshold. This is conservative; the
@@ -604,9 +573,6 @@ public class HashMap<K,V>
* for this key.
*/
final Entry<K,V> removeEntryForKey(Object key) {
if(isEmpty()) {
return null;
}
int hash = (key == null) ? 0 : hash(key);
int i = indexFor(hash, table.length);
@SuppressWarnings("unchecked")
@@ -639,7 +605,7 @@ public class HashMap<K,V>
* for matching.
*/
final Entry<K,V> removeMapping(Object o) {
if (isEmpty() || !(o instanceof Map.Entry))
if (!(o instanceof Map.Entry))
return null;

Map.Entry<?,?> entry = (Map.Entry<?,?>) o;
@@ -675,7 +641,9 @@ public class HashMap<K,V>
*/
public void clear() {
modCount++;
Arrays.fill(table, null);
Entry<?,?>[] tab = table;
for (int i = 0; i < tab.length; i++)
tab[i] = null;
size = 0;
}

@@ -688,10 +656,6 @@ public class HashMap<K,V>
* specified value
*/
public boolean containsValue(Object value) {
if(isEmpty()) {
return false;
}

if (value == null)
return containsNullValue();

@@ -729,9 +693,7 @@ public class HashMap<K,V>
} catch (CloneNotSupportedException e) {
// assert false;
}
result.table = (table == EMPTY_TABLE)
? EMPTY_TABLE
: new Entry<?,?>[table.length];
result.table = new Entry<?,?>[table.length];
result.entrySet = null;
result.modCount = 0;
result.size = 0;
@@ -787,7 +749,8 @@ public class HashMap<K,V>
}

public final int hashCode() {
return Objects.hashCode(getKey()) ^ Objects.hashCode(getValue());
return (key==null ? 0 : key.hashCode()) ^
(value==null ? 0 : value.hashCode());
}

public final String toString() {
@@ -1045,7 +1008,7 @@ public class HashMap<K,V>
* serialize it).
*
* @serialData The <i>capacity</i> of the HashMap (the length of the
* bucket array, a power of 2) is emitted (int), followed by the
* bucket array) is emitted (int), followed by the
* <i>size</i> (an int, the number of key-value
* mappings), followed by the key (Object) and value (Object)
* for each key-value mapping. The key-value mappings are
@@ -1054,14 +1017,14 @@ public class HashMap<K,V>
private void writeObject(java.io.ObjectOutputStream s)
throws IOException
{
Iterator<Map.Entry<K,V>> i =
(size > 0) ? entrySet0().iterator() : null;

// Write out the threshold, loadfactor, and any hidden stuff
s.defaultWriteObject();

// Write out number of buckets
if (table==EMPTY_TABLE)
s.writeInt(roundUpToPowerOf2(threshold));
else
s.writeInt(table.length);
s.writeInt(table.length);

// Write out size (number of Mappings)
s.writeInt(size);
@@ -1095,15 +1058,7 @@ public class HashMap<K,V>
sun.misc.Hashing.randomHashSeed(this));

// Read in number of buckets and allocate the bucket array;
table = EMPTY_TABLE;

int buckets = s.readInt();

if ((buckets < 0) || // negative
(buckets > HashMap.MAXIMUM_CAPACITY) || // fits in array
(Integer.bitCount(buckets) > 1)) /* not power of 2 or zero */ {
throw new InvalidObjectException("Illegal capacity: " + buckets);
}
s.readInt(); // ignored

// Read number of mappings
int mappings = s.readInt();
@@ -1111,24 +1066,23 @@ public class HashMap<K,V>
throw new InvalidObjectException("Illegal mappings count: " +
mappings);

int mappingsCapacity = Math.max(
(int) Math.min(
// capacity chosen by number of mappings
// and desired load (if >= 0.25)
mappings * Math.min(1 / loadFactor, 4.0f),
// we have limits...
HashMap.MAXIMUM_CAPACITY),
// maybe they want extra buckets.
buckets);

if(mappings > 0) {
inflateTable(mappingsCapacity);
} else {
threshold = mappingsCapacity;
int initialCapacity = (int) Math.min(
// capacity chosen by number of mappings
// and desired load (if >= 0.25)
mappings * Math.min(1 / loadFactor, 4.0f),
// we have limits...
HashMap.MAXIMUM_CAPACITY);
int capacity = 1;
// find smallest power of two which holds all mappings
while (capacity < initialCapacity) {
capacity <<= 1;
}

table = new Entry<?,?>[capacity];
threshold = (int) Math.min(capacity * loadFactor, MAXIMUM_CAPACITY + 1);
init(); // Give subclass a chance to do its thing.


// Read the keys and values, and put the mappings in the HashMap
for (int i=0; i<mappings; i++) {
@SuppressWarnings("unchecked")
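
The HashMap hunks revert the matching lazy-table change from JDK-7143928: the shared EMPTY_TABLE plus inflateTable(), with roundUpToPowerOf2() choosing the bucket-array length on first use instead of in the constructor. The sketch below is a self-contained illustration of that power-of-two rounding step only; the class and method names are hypothetical, and the MAXIMUM_CAPACITY clamping the real code applies is omitted.

// Illustrative sketch of rounding a requested capacity up to a power of two,
// in the style of the removed roundUpToPowerOf2 helper (not the JDK source).
public class PowerOfTwoDemo {
    static int roundUpToPowerOfTwo(int number) {
        int highest = Integer.highestOneBit(number);
        if (highest == 0) {
            return 1;                                   // anything below 1 rounds up to 1
        }
        // exact powers of two stay as-is; everything else goes to the next power of two
        return (Integer.bitCount(number) > 1) ? highest << 1 : highest;
    }

    public static void main(String[] args) {
        for (int n : new int[]{0, 1, 3, 16, 17, 1000}) {
            System.out.println(n + " -> " + roundUpToPowerOfTwo(n));
        }
    }
}
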
@@ -1,221 +0,0 @@
/*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/

/*
* @test
* @bug 7143928
* @run testng BasicSerialization
* @summary Ensure Maps can be serialized and deserialized.
* @author Mike Duigou
*/
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ByteArrayInputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;

import org.testng.annotations.Test;
import org.testng.annotations.DataProvider;
import static org.testng.Assert.fail;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertSame;

public class BasicSerialization {

enum IntegerEnum {

e0, e1, e2, e3, e4, e5, e6, e7, e8, e9,
e10, e11, e12, e13, e14, e15, e16, e17, e18, e19,
e20, e21, e22, e23, e24, e25, e26, e27, e28, e29,
e30, e31, e32, e33, e34, e35, e36, e37, e38, e39,
e40, e41, e42, e43, e44, e45, e46, e47, e48, e49,
e50, e51, e52, e53, e54, e55, e56, e57, e58, e59,
e60, e61, e62, e63, e64, e65, e66, e67, e68, e69,
e70, e71, e72, e73, e74, e75, e76, e77, e78, e79,
e80, e81, e82, e83, e84, e85, e86, e87, e88, e89,
e90, e91, e92, e93, e94, e95, e96, e97, e98, e99,
EXTRA_KEY;
public static final int SIZE = values().length;
};
private static final int TEST_SIZE = IntegerEnum.SIZE - 1;
/**
* Realized keys ensure that there is always a hard ref to all test objects.
*/
private static final IntegerEnum[] KEYS = new IntegerEnum[TEST_SIZE];
/**
* Realized values ensure that there is always a hard ref to all test
* objects.
*/
private static final String[] VALUES = new String[TEST_SIZE];

static {
IntegerEnum[] keys = IntegerEnum.values();
for (int each = 0; each < TEST_SIZE; each++) {
KEYS[each] = keys[each];
VALUES[each] = keys[each].name();
}
}
private static final IntegerEnum EXTRA_KEY = IntegerEnum.EXTRA_KEY;
private static final String EXTRA_VALUE = IntegerEnum.EXTRA_KEY.name();

public static <K, V> Map<K, V> mapClone(Map<K, V> map) {
Method cloneMethod;

try {
cloneMethod = map.getClass().getMethod("clone", new Class[]{});
} catch (NoSuchMethodException | SecurityException all) {
cloneMethod = null;
}

if (null != cloneMethod) {
try {
Map<K, V> result = (Map<K, V>)cloneMethod.invoke(map, new Object[]{});
return result;
} catch (Exception all) {
fail("clone() failed " + map.getClass().getSimpleName(), all);
return null;
}
} else {
Constructor<? extends Map> copyConstructor;
try {
copyConstructor = (Constructor<? extends Map>)map.getClass().getConstructor(new Class[]{Map.class});

Map<K, V> result = (Map<K, V>)copyConstructor.newInstance(new Object[]{map});

return result;
} catch (Exception all) {
return serialClone(map);
}
}
}

@Test(dataProvider = "Map<IntegerEnum,String>")
public void testSerialization(String description, Map<IntegerEnum, String> map) {
Object foo = new Object();

Map<IntegerEnum, String> clone = mapClone(map);
Map<IntegerEnum, String> serialClone = serialClone(map);

assertEquals(map, map, description + ":should equal self");
assertEquals(clone, map, description + ":should equal clone");
assertEquals(map, clone, description + ": should equal orginal map");
assertEquals(serialClone, map, description + ": should equal deserialized clone");
assertEquals(map, serialClone, description + ": should equal original map");
assertEquals(serialClone, clone, description + ": deserialized clone should equal clone");
assertEquals(clone, serialClone, description + ": clone should equal deserialized clone");

assertFalse(map.containsKey(EXTRA_KEY), description + ":unexpected key");
assertFalse(clone.containsKey(EXTRA_KEY), description + ":unexpected key");
assertFalse(serialClone.containsKey(EXTRA_KEY), description + ":unexpected key");
map.put(EXTRA_KEY, EXTRA_VALUE);
clone.put(EXTRA_KEY, EXTRA_VALUE);
serialClone.put(EXTRA_KEY, EXTRA_VALUE);
assertTrue(map.containsKey(EXTRA_KEY), description + ":missing key");
assertTrue(clone.containsKey(EXTRA_KEY), description + ":missing key");
assertTrue(serialClone.containsKey(EXTRA_KEY), description + ":missing key");
assertSame(map.get(EXTRA_KEY), EXTRA_VALUE, description + ":wrong value");
assertSame(clone.get(EXTRA_KEY), EXTRA_VALUE, description + ":wrong value");
assertSame(serialClone.get(EXTRA_KEY), EXTRA_VALUE, description + ":wrong value");

assertEquals(map, map, description + ":should equal self");
assertEquals(clone, map, description + ":should equal clone");
assertEquals(map, clone, description + ": should equal orginal map");
assertEquals(serialClone, map, description + ": should equal deserialized clone");
assertEquals(map, serialClone, description + ": should equal original map");
assertEquals(serialClone, clone, description + ": deserialized clone should equal clone");
assertEquals(clone, serialClone, description + ": clone should equal deserialized clone");
}

static byte[] serializedForm(Object obj) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
new ObjectOutputStream(baos).writeObject(obj);
return baos.toByteArray();
} catch (IOException e) {
fail("Unexpected Exception", e);
return null;
}
}

static Object readObject(byte[] bytes) throws IOException, ClassNotFoundException {
InputStream is = new ByteArrayInputStream(bytes);
return new ObjectInputStream(is).readObject();
}

@SuppressWarnings("unchecked")
static <T> T serialClone(T obj) {
try {
return (T)readObject(serializedForm(obj));
} catch (IOException | ClassNotFoundException e) {
fail("Unexpected Exception", e);
return null;
}
}

@DataProvider(name = "Map<IntegerEnum,String>", parallel = true)
private static Iterator<Object[]> makeMaps() {
return Arrays.asList(
// empty
new Object[]{"HashMap", new HashMap()},
new Object[]{"LinkedHashMap", new LinkedHashMap()},
new Object[]{"Collections.checkedMap(HashMap)", Collections.checkedMap(new HashMap(), IntegerEnum.class, String.class)},
new Object[]{"Collections.synchronizedMap(HashMap)", Collections.synchronizedMap(new HashMap())},
// null hostile
new Object[]{"EnumMap", new EnumMap(IntegerEnum.class)},
new Object[]{"Hashtable", new Hashtable()},
new Object[]{"TreeMap", new TreeMap()},
new Object[]{"ConcurrentHashMap", new ConcurrentHashMap()},
new Object[]{"ConcurrentSkipListMap", new ConcurrentSkipListMap()},
new Object[]{"Collections.checkedMap(ConcurrentHashMap)", Collections.checkedMap(new ConcurrentHashMap(), IntegerEnum.class, String.class)},
new Object[]{"Collections.synchronizedMap(EnumMap)", Collections.synchronizedMap(new EnumMap(IntegerEnum.class))},
// filled
new Object[]{"HashMap", fillMap(new HashMap())},
new Object[]{"LinkedHashMap", fillMap(new LinkedHashMap())},
new Object[]{"Collections.checkedMap(HashMap)", Collections.checkedMap(fillMap(new HashMap()), IntegerEnum.class, String.class)},
new Object[]{"Collections.synchronizedMap(HashMap)", Collections.synchronizedMap(fillMap(new HashMap()))},
// null hostile
new Object[]{"EnumMap", fillMap(new EnumMap(IntegerEnum.class))},
new Object[]{"Hashtable", fillMap(new Hashtable())},
new Object[]{"TreeMap", fillMap(new TreeMap())},
new Object[]{"ConcurrentHashMap", fillMap(new ConcurrentHashMap())},
new Object[]{"ConcurrentSkipListMap", fillMap(new ConcurrentSkipListMap())},
new Object[]{"Collections.checkedMap(ConcurrentHashMap)", Collections.checkedMap(fillMap(new ConcurrentHashMap()), IntegerEnum.class, String.class)},
new Object[]{"Collections.synchronizedMap(EnumMap)", Collections.synchronizedMap(fillMap(new EnumMap(IntegerEnum.class)))}).iterator();
}

private static Map<IntegerEnum, String> fillMap(Map<IntegerEnum, String> result) {
for (int each = 0; each < TEST_SIZE; each++) {
result.put(KEYS[each], VALUES[each]);
}

return result;
}
}