You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@logging.apache.org by rp...@apache.org on 2016/08/07 13:07:35 UTC
logging-log4j2 git commit: LOG4J2-1447 Open hash map based
ContextData implementation
Repository: logging-log4j2
Updated Branches:
refs/heads/LOG4J2-1010&LOG4J2-1447-injectable-contextdata&better-datastructure 1e4ea9bc0 -> c846a7572
LOG4J2-1447 Open hash map based ContextData implementation
Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/c846a757
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/c846a757
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/c846a757
Branch: refs/heads/LOG4J2-1010&LOG4J2-1447-injectable-contextdata&better-datastructure
Commit: c846a75729edff066a91213a10386403204db2b2
Parents: 1e4ea9b
Author: rpopma <rp...@apache.org>
Authored: Sun Aug 7 22:07:40 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sun Aug 7 22:07:40 2016 +0900
----------------------------------------------------------------------
.../log4j/core/impl/OpenHashMapContextData.java | 892 +++++++++++++++++++
.../core/impl/OpenHashMapContextDataTest.java | 506 +++++++++++
2 files changed, 1398 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/c846a757/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/OpenHashMapContextData.java
----------------------------------------------------------------------
diff --git a/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/OpenHashMapContextData.java b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/OpenHashMapContextData.java
new file mode 100644
index 0000000..bf7e60a
--- /dev/null
+++ b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/OpenHashMapContextData.java
@@ -0,0 +1,892 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache license, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.logging.log4j.core.impl;
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.ConcurrentModificationException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import org.apache.logging.log4j.core.ContextData;
+import org.apache.logging.log4j.core.util.BiConsumer;
+import org.apache.logging.log4j.core.util.Integers;
+import org.apache.logging.log4j.core.util.TriConsumer;
+import org.apache.logging.log4j.spi.ThreadContextMap;
+
+/**
+ * Open hash map-based implementation of the {@code ContextData} interface.
+ * Implementation based on <a href="http://fastutil.di.unimi.it/">fastutil</a>'s
+ * <a href="http://fastutil.di.unimi.it/docs/it/unimi/dsi/fastutil/objects/Object2ObjectOpenHashMap.html">Object2ObjectOpenHashMap</a>.
+ * <p>
+ * A type-specific hash map with a fast, small-footprint implementation.
+ *
+ * <P>
+ * Instances of this class use a hash table to represent a map. The table is
+ * filled up to a specified <em>load factor</em>, and then doubled in size to
+ * accommodate new entries. If the table is emptied below <em>one fourth</em> of
+ * the load factor, it is halved in size. However, halving is not performed when
+ * deleting entries from an iterator, as it would interfere with the iteration
+ * process.
+ *
+ * <p>
+ * Note that {@link #clear()} does not modify the hash table size. Rather, the
+ * {@link #trim(int)} method lets you control the size of
+ * the table; this is particularly useful if you reuse instances of this class.
+ * <p>
+ * <ul>
+ * <li>Garbage-free iteration over key-value pairs with {@code BiConsumer} and {@code TriConsumer}.</li>
+ * <li>Fast copy. If the ThreadContextMap is also an instance of {@code OpenHashMapContextData},
+ * the full thread context data can be transferred with two array copies and five field updates.</li>
+ * </ul>
+ *
+ * @see ThreadContextDataInjector
+ * @since 2.7
+ */
+public class OpenHashMapContextData<K, V> implements MutableContextData, ThreadContextMap {
+ /** The initial default size of a hash table. */
+ public static final int DEFAULT_INITIAL_SIZE = 16;
+
+ /** The default load factor of a hash table. */
+ public static final float DEFAULT_LOAD_FACTOR = .75f;
+
+ private static final long serialVersionUID = -1486744623338827187L;
+
+ /** The array of keys. */
+ protected transient K[] keys;
+ /** The array of values. */
+ protected transient V[] values;
+ /** The mask for wrapping a position counter. */
+ protected transient int mask;
+ /** Whether this set contains the key zero. */
+ protected transient boolean containsNullKey;
+ /** The current table size. */
+ protected transient int arraySize;
+ /**
+ * Threshold after which we rehash. It must be the table size times {@link #loadFactor}.
+ */
+ protected transient int maxFill;
+ /** Number of entries in the set (including the key zero, if present). */
+ protected int size;
+ /** The acceptable load factor. */
+ protected final float loadFactor;
+
+ private V defRetValue = null;
+
    /**
     * Creates a new hash map with initial expected
     * {@link #DEFAULT_INITIAL_SIZE} entries and
     * {@link #DEFAULT_LOAD_FACTOR} as load factor.
     */
    public OpenHashMapContextData() {
        this(DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load factor.
     *
     * @param expected the expected number of elements in the hash map.
     */
    public OpenHashMapContextData(final int expected) {
        this(expected, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new hash map.
     *
     * <p>
     * The actual table size will be the least power of two greater than
     * <code>expected</code>/<code>f</code>.
     *
     * @param expected the expected number of elements in the hash set.
     * @param f the load factor.
     * @throws IllegalArgumentException if {@code f} is not in (0, 1] or {@code expected} is negative.
     */
    @SuppressWarnings("unchecked")
    public OpenHashMapContextData(final int expected, final float f) {
        if (f <= 0 || f > 1) {
            throw new IllegalArgumentException(
                    "Load factor must be greater than 0 and smaller than or equal to 1");
        }
        if (expected < 0){
            throw new IllegalArgumentException(
                    "The expected number of elements must be nonnegative");
        }
        this.loadFactor = f;
        // arraySize is always a power of two, so (hash & mask) is a valid slot index.
        arraySize = HashCommon.arraySize(expected, f);
        mask = arraySize - 1;
        maxFill = HashCommon.maxFill(arraySize, f);
        // One extra slot (index arraySize) is reserved for the null key.
        keys = (K[]) new Object[arraySize + 1];
        values = (V[]) new Object[arraySize + 1];
    }
    /**
     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load
     * factor copying a given one.
     *
     * @param map a {@link Map} to be copied into the new hash map.
     */
    public OpenHashMapContextData(final Map<? extends K, ? extends V> map) {
        this(map, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new hash map copying a given one.
     *
     * @param map a {@link Map} to be copied into the new hash map.
     * @param f the load factor.
     */
    public OpenHashMapContextData(final Map<? extends K, ? extends V> map, final float f) {
        this(map.size(), f);
        putAll(map);
    }

    /**
     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load
     * factor copying a given type-specific one.
     *
     * @param contextData a type-specific map to be copied into the new hash map.
     */
    public OpenHashMapContextData(final ContextData contextData) {
        this(contextData, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new hash map copying a given type-specific one.
     *
     * @param contextData a type-specific map to be copied into the new hash map.
     * @param f the load factor.
     */
    public OpenHashMapContextData(final ContextData contextData, final float f) {
        this(contextData.size(), f);
        if (contextData instanceof OpenHashMapContextData) {
            // Fast path: copy the backing arrays directly instead of re-hashing every entry.
            initFrom0((OpenHashMapContextData) contextData);
        } else {
            contextData.forEach(PUT_ALL, this);
        }
    }

    /** Reusable action that copies each (key, value) pair into the supplied target map. */
    private static final TriConsumer<String, Object, MutableContextData> PUT_ALL =
            new TriConsumer<String, Object, MutableContextData>() {
        @Override
        public void accept(final String key, final Object value, final MutableContextData contextData) {
            contextData.putValue(key, value);
        }
    };
+
    /**
     * Copies all internal state from {@code other} into this map: five scalar field
     * updates plus fresh copies of the key and value arrays. The load factor is
     * final and therefore not (and cannot be) copied.
     */
    @SuppressWarnings("unchecked")
    private void initFrom0(final OpenHashMapContextData other) {
        // this.loadFactor = other.loadFactor; // final field
        this.arraySize = other.arraySize;
        this.size = other.size;
        this.containsNullKey = other.containsNullKey;
        this.mask = other.mask;
        this.maxFill = other.maxFill;
        keys = (K[]) Arrays.copyOf(other.keys, arraySize + 1);
        values = (V[]) Arrays.copyOf(other.values, arraySize + 1);
    }

    /** Returns the number of entries stored in the hash slots proper (excluding the null key, if present). */
    private int realSize() {
        return containsNullKey ? size - 1 : size;
    }

    /** Grows the table, if necessary, so it can hold {@code capacity} entries without rehashing. */
    private void ensureCapacity(final int capacity) {
        final int needed = HashCommon.arraySize(capacity, loadFactor);
        if (needed > arraySize) {
            rehash(needed);
        }
    }

    /** Like {@link #ensureCapacity(int)} but caps the table size at 2<sup>30</sup> slots. */
    private void tryCapacity(final long capacity) {
        final int needed = (int) Math.min(
                1 << 30, Math.max(2, Integers.ceilingNextPowerOfTwo((int) Math.ceil(capacity / loadFactor))));
        if (needed > arraySize) {
            rehash(needed);
        }
    }
+
+ /** {@inheritDoc} */
+ public void putAll(Map<? extends K, ? extends V> map) {
+ if (loadFactor <= .5) {
+ // The resulting map will be sized for m.size() elements
+ ensureCapacity(map.size());
+ } else {
+ // The resulting map will be tentatively sized for size() + m.size() elements
+ tryCapacity(size() + map.size());
+ }
+ for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
+ putObjectValue(entry.getKey(), entry.getValue());
+ }
+ }
+
    /**
     * Returns a new mutable {@code HashMap} snapshot of this context data, with
     * each value converted via {@code toString()} (null values remain null).
     */
    @Override
    public Map<String, String> asMap() {
        final Map<String, String> result = new HashMap<>(size);
        forEach(COPY_INTO_MAP, result);
        return result;
    }

    /** Action that copies each entry into the target map, stringifying non-null values. */
    private static final TriConsumer<String, Object, Map<String, String>> COPY_INTO_MAP =
            new TriConsumer<String, Object, Map<String, String>>() {
        @Override
        public void accept(final String k, final Object v, final Map<String, String> map) {
            map.put(k, v == null ? null : v.toString());
        }
    };
+
    /**
     * Removes all elements from this map.
     *
     * <P>To increase object reuse, this method does not change the table size.
     * If you want to reduce the table size, you must use {@link #trim(int)}.
     */
    @Override
    public void clear() {
        if (size == 0) {
            return;
        }
        size = 0;
        containsNullKey = false;
        // Null out both arrays so stale keys/values can be garbage-collected.
        Arrays.fill(keys, (null));
        Arrays.fill(values, null);
    }
+
    @Override
    public boolean containsKey(final String key) {
        return containsObjectKey((Object) key);
    }

    /**
     * Returns true if this map contains a mapping for the given key, using
     * linear probing from the key's masked (mixed) hash slot.
     *
     * @param k the key to look up (may be {@code null}).
     */
    @SuppressWarnings("unchecked")
    private boolean containsObjectKey(final Object k) {
        if (k == null) {
            // The null key lives in the dedicated extra slot; just check the flag.
            return containsNullKey;
        }
        K curr;
        final K[] key = this.keys;
        int pos;
        // The starting point.
        if ((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null) {
            return false;
        }
        if (k.equals(curr)) {
            return true;
        }
        // There's always an unused entry, so this probe loop terminates.
        while (true) {
            if ((curr = key[pos = (pos + 1) & mask]) == null) {
                return false;
            }
            if (k.equals(curr)) {
                return true;
            }
        }
    }
+
+ public boolean equals(final Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof ContextData)) {
+ return false;
+ }
+ final ContextData other = (ContextData) obj;
+ if (other.size() != size()) {
+ return false;
+ }
+ int pos = arraySize;
+ if (containsNullKey) {
+ if (!Objects.equals(getObjectValue(null), other.getValue(null))) {
+ return false;
+ }
+ }
+ --pos;
+ final K myKeys[] = this.keys;
+ for (; pos >= 0; pos--) {
+ K k;
+ if ((k = myKeys[pos]) != null) {
+ if (!Objects.equals(values[pos], other.getValue((String) k))) {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
    /**
     * Performs the given action for each key-value pair without allocating any
     * iterator objects (garbage-free iteration). The null-key entry (stored in
     * the extra slot at index arraySize) is visited first.
     *
     * @throws ConcurrentModificationException if the action changes this map's size.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <VAL> void forEach(final BiConsumer<String, ? super VAL> action) {
        final int startSize = size;
        final K myKeys[] = this.keys;
        int pos = arraySize;
        if (containsNullKey) {
            // myKeys[arraySize] is the null-key slot: the key passed here is null.
            action.accept((String) myKeys[pos], (VAL) values[pos]);
            if (size != startSize) {
                throw new ConcurrentModificationException();
            }
        }
        --pos;
        for (; pos >= 0; pos--) {
            if (myKeys[pos] != null) {
                action.accept((String) myKeys[pos], (VAL) values[pos]);
                if (size != startSize) {
                    throw new ConcurrentModificationException();
                }
            }
        }
    }

    /**
     * Performs the given action for each key-value pair, passing the supplied
     * state object through to the action (garbage-free iteration).
     *
     * @throws ConcurrentModificationException if the action changes this map's size.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <VAL, STATE> void forEach(final TriConsumer<String, ? super VAL, STATE> action, final STATE state) {
        final int startSize = size;
        final K myKeys[] = this.keys;
        int pos = arraySize;
        if (containsNullKey) {
            action.accept((String) myKeys[pos], (VAL) values[pos], state);
            if (size != startSize) {
                throw new ConcurrentModificationException();
            }
        }
        --pos;
        for (; pos >= 0; pos--) {
            if (myKeys[pos] != null) {
                action.accept((String) myKeys[pos], (VAL) values[pos], state);
                if (size != startSize) {
                    throw new ConcurrentModificationException();
                }
            }
        }
    }
+
    @Override
    public String get(final String key) {
        return (String) getObjectValue(key);
    }

    /**
     * Returns the value mapped to the given key, or {@code defRetValue} (null)
     * if absent. Linear probing from the key's masked (mixed) hash slot.
     *
     * @param k the key to look up (may be {@code null}).
     */
    @SuppressWarnings("unchecked")
    private V getObjectValue(final Object k) {
        if (k == null) {
            // The null key, when present, lives in the dedicated extra slot.
            return containsNullKey ? values[arraySize] : defRetValue;
        }
        K curr;
        final K[] key = this.keys;
        int pos;
        // The starting point.
        if ((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null) {
            return defRetValue;
        }
        if (k.equals(curr)) {
            return values[pos];
        }
        // There's always an unused entry, so this probe loop terminates.
        while (true) {
            if (((curr = key[pos = (pos + 1) & mask]) == null)) {
                return defRetValue;
            }
            if (k.equals(curr)) {
                return values[pos];
            }
        }
    }
+
+ @Override
+ public Map<String, String> getCopy() {
+ return asMap();
+ }
+
+ @Override
+ public Map<String, String> getImmutableMapOrNull() {
+ return isEmpty() ? null : Collections.unmodifiableMap(asMap());
+ }
+
+ @Override
+ public <VAL> VAL getValue(final String key) {
+ return (VAL) getObjectValue((Object) key);
+ }
+
+ @Override
+ public boolean isEmpty() {
+ return size == 0;
+ }
+
    @Override
    @SuppressWarnings("unchecked")
    public void put(final String key, final String value) {
        putObjectValue((K) key, (V) value);
    }

    /**
     * Inserts the mapping unless the key is already present.
     *
     * @return the slot index of an existing entry for {@code k}, or -1 if a new
     *         entry was created (in which case key and value are already stored).
     */
    private int insert(final K k, final V v) {
        int pos;
        if (k == null) {
            if (containsNullKey) {
                return arraySize;
            }
            containsNullKey = true;
            pos = arraySize; // dedicated extra slot for the null key
        } else {
            K curr;
            final K[] key = this.keys;
            // The starting point.
            if (!((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null)) {
                if (curr.equals(k)) {
                    return pos;
                }
                while (!((curr = key[pos = (pos + 1) & mask]) == null)) {
                    if (curr.equals(k)) {
                        return pos;
                    }
                }
            }
        }
        keys[pos] = k;
        values[pos] = v;
        if (size++ >= maxFill) {
            // Grow before the load factor is exceeded; keeps at least one slot free.
            rehash(HashCommon.arraySize(size + 1, loadFactor));
        }
        return -1;
    }
+
    /**
     * Copies all entries from the given context data into this map. When this map
     * is empty and the source has this same implementation, the backing arrays are
     * copied wholesale instead of re-hashing each entry. A null source is a no-op.
     */
    @Override
    public void putAll(final ContextData source) {
        if (size() == 0 && source instanceof OpenHashMapContextData) {
            initFrom0((OpenHashMapContextData) source);
        } else if (source != null) {
            source.forEach(PUT_ALL, this);
        }
    }

    /**
     * Associates the value with the key.
     *
     * @return the previous value, or {@code defRetValue} (null) if the key was absent.
     */
    private V putObjectValue(final K k, final V v) {
        final int pos = insert(k, v);
        if (pos < 0) {
            // New entry: insert() already stored both the key and the value.
            return defRetValue;
        }
        // Existing entry: overwrite and return the old value.
        final V oldValue = values[pos];
        values[pos] = v;
        return oldValue;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void putValue(final String key, final Object value) {
        putObjectValue((K) key, (V) value);
    }
+
+ @Override
+ public void remove(final String key) {
+ removeObjectKey((Object) key);
+ }
+
+ @SuppressWarnings("unchecked")
+ private V removeObjectKey(final Object k) {
+ if (k == null) {
+ if (containsNullKey) {
+ return removeNullEntry();
+ }
+ return defRetValue;
+ }
+ final K[] key = this.keys;
+ int pos = HashCommon.mix(k.hashCode()) & mask;
+ K curr = key[pos & mask];
+ // The starting point.
+ if (curr == null) {
+ return defRetValue;
+ }
+ if (k.equals(curr)) {
+ return removeEntry(pos);
+ }
+ while (true) {
+ if ((curr = key[pos = (pos + 1) & mask]) == null) {
+ return defRetValue;
+ }
+ if (k.equals(curr)) {
+ return removeEntry(pos);
+ }
+ }
+ }
    /**
     * Clears the slot at {@code pos}, closes the probe gap via {@link #shiftKeys(int)},
     * and halves the table when it falls below a quarter of the fill threshold.
     */
    private V removeEntry(final int pos) {
        final V oldValue = values[pos];
        values[pos] = null;
        size--;
        shiftKeys(pos);
        if (size < maxFill / 4 && arraySize > DEFAULT_INITIAL_SIZE) {
            rehash(arraySize / 2);
        }
        return oldValue;
    }

    /** Removes the entry stored in the dedicated null-key slot at index arraySize. */
    private V removeNullEntry() {
        containsNullKey = false;
        keys[arraySize] = null;
        final V oldValue = values[arraySize];
        values[arraySize] = null;
        size--;
        if (size < maxFill / 4 && arraySize > DEFAULT_INITIAL_SIZE) {
            rehash(arraySize / 2);
        }
        return oldValue;
    }

    /**
     * Shifts left entries with the specified hash code, starting at the
     * specified position, and empties the resulting free entry.
     *
     * @param pos a starting position.
     */
    private void shiftKeys(int pos) {
        // Shift entries with the same hash.
        int last, slot;
        K curr;
        final K[] myKeys = this.keys;
        for (;;) {
            pos = ((last = pos) + 1) & mask;
            for (;;) {
                if (((curr = myKeys[pos]) == null)) {
                    myKeys[last] = (null);
                    values[last] = null;
                    return;
                }
                slot = HashCommon.mix(curr.hashCode()) & mask;
                // Move the entry back only when its natural slot does not lie in the
                // (last, pos] probe window, accounting for wrap-around.
                if (last <= pos ? (last >= slot || slot > pos) : (last >= slot && slot > pos)) {
                    break;
                }
                pos = (pos + 1) & mask;
            }
            myKeys[last] = curr;
            values[last] = values[pos];
        }
    }
+
    /** Returns the number of key-value pairs, including the null-key entry if present. */
    @Override
    public int size() {
        return size;
    }
+
+ /**
+ * Rehashes this map if the table is too large.
+ *
+ * <P>
+ * Let <var>N</var> be the smallest table size that can hold
+ * <code>max(n,{@link #size()})</code> entries, still satisfying the load
+ * factor. If the current table size is smaller than or equal to
+ * <var>N</var>, this method does nothing. Otherwise, it rehashes this map
+ * in a table of size <var>N</var>.
+ *
+ * <P>
+ * This method is useful when reusing maps. {@linkplain #clear() Clearing a
+ * map} leaves the table size untouched. If you are reusing a map many times,
+ * you can call this method with a typical size to avoid keeping around a
+ * very large table just because of a few large transient maps.
+ *
+ * @param n
+ * the threshold for the trimming.
+ * @return true if there was enough memory to trim the map.
+ */
+ public boolean trim(final int n) {
+ final int l = HashCommon.nextPowerOfTwo((int) Math.ceil(n / loadFactor));
+ if (l >= n || size > HashCommon.maxFill(l, loadFactor))
+ return true;
+ try {
+ rehash(l);
+ } catch (OutOfMemoryError cantDoIt) { // unusual to catch OOME but in this case appropriate
+ return false;
+ }
+ return true;
+ }
    /**
     * Rehashes the map.
     *
     * <P>
     * This method implements the basic rehashing strategy, and may be overriden
     * by subclasses implementing different rehashing strategies (e.g.,
     * disk-based rehashing). However, you should not override this method
     * unless you understand the internal workings of this class.
     *
     * @param newN the new table size (expected to be a power of two — TODO confirm callers guarantee this)
     */
    @SuppressWarnings("unchecked")
    protected void rehash(final int newN) {
        final K myKeys[] = this.keys;
        final V myValues[] = this.values;
        final int mask = newN - 1; // Note that this is used by the hashing macro
        final K newKey[] = (K[]) new Object[newN + 1];
        final V newValue[] = (V[]) new Object[newN + 1];
        int i = arraySize, pos;
        // Re-insert each of the realSize() non-null keys into the new table.
        for (int j = realSize(); j-- != 0;) {
            while (myKeys[--i] == null) {
                // advance i until we find an existing key
            }
            if (newKey[pos = HashCommon.mix(myKeys[i].hashCode()) & mask] != null) { // rehash & check slot availability
                while (newKey[pos = (pos + 1) & mask] != null) {
                    // find available slot at (or immediately following) pos
                }
            }
            newKey[pos] = myKeys[i];
            newValue[pos] = myValues[i];
        }
        // Carry over the null-key value stored in the extra slot (its key slot stays null).
        newValue[newN] = myValues[arraySize];
        arraySize = newN;
        this.mask = mask;
        maxFill = HashCommon.maxFill(arraySize, loadFactor);
        this.keys = newKey;
        this.values = newValue;
    }
+
+ /**
+ * Returns a hash code for this map.
+ *
+ * This method overrides the generic method provided by the superclass.
+ * Since <code>equals()</code> is not overriden, it is important that the
+ * value returned by this method is the same value as the one returned by
+ * the overriden method.
+ *
+ * @return a hash code for this map.
+ */
+ public int hashCode() {
+ int result = 0;
+ for (int j = realSize(), i = 0, t = 0; j-- != 0;) {
+ while (keys[i] == null) {
+ i++;
+ }
+ if (this != keys[i]) {
+ t = keys[i].hashCode();
+ }
+ if (this != values[i]) {
+ t ^= (values[i] == null ? 0 : values[i].hashCode());
+ }
+ result += t;
+ i++;
+ }
+ // Zero / null keys have hash zero.
+ if (containsNullKey) {
+ result += (values[arraySize] == null ? 0 : values[arraySize].hashCode());
+ }
+ return result;
+ }
+
    /**
     * Deserializes this map: restores the transient tables by reading {@code size}
     * key-value pairs and re-inserting each one with linear probing.
     */
    @SuppressWarnings("unchecked")
    private void readObject(final ObjectInputStream s) throws IOException, ClassNotFoundException {
        s.defaultReadObject();
        arraySize = HashCommon.arraySize(size, loadFactor);
        maxFill = HashCommon.maxFill(arraySize, loadFactor);
        mask = arraySize - 1;
        final K key[] = this.keys = (K[]) new Object[arraySize + 1];
        final V value[] = this.values = (V[]) new Object[arraySize + 1];
        K k;
        V v;
        for (int i = size, pos; i-- != 0;) {
            k = (K) s.readObject();
            v = (V) s.readObject();
            if (k == null) {
                pos = arraySize; // dedicated null-key slot
                containsNullKey = true;
            } else {
                pos = HashCommon.mix(k.hashCode()) & mask;
                while (key[pos] != null) {
                    pos = (pos + 1) & mask;
                }
            }
            key[pos] = k;
            value[pos] = v;
        }
    }

    /**
     * Serializes this map: writes the non-transient fields, then each key-value
     * pair. IOExceptions thrown inside the iteration are wrapped by
     * {@link #SERIALIZER}; they are unwrapped and rethrown here.
     */
    private void writeObject(final ObjectOutputStream s) throws IOException {
        s.defaultWriteObject();
        try {
            forEach(SERIALIZER, s);
        } catch (final RuntimeException runex) {
            if (runex.getCause() instanceof IOException) {
                throw (IOException) runex.getCause();
            }
            throw runex;
        }
    }
+
    /**
     * Action that writes each key-value pair to the stream. IOExceptions are
     * wrapped in IllegalStateException because TriConsumer cannot throw checked
     * exceptions; writeObject unwraps them again.
     */
    private static final TriConsumer<String, Object, ObjectOutputStream> SERIALIZER =
            new TriConsumer<String, Object, ObjectOutputStream>() {
        @Override
        public void accept(final String k, final Object v, final ObjectOutputStream objectOutputStream) {
            try {
                objectOutputStream.writeObject(k);
                objectOutputStream.writeObject(v);
            } catch (final IOException ioex) {
                throw new IllegalStateException(ioex);
            }
        }
    };
+
    /** Returns a {@code {key=value, ...}} rendering; self-references print as "(this map)". */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder(256);
        sb.append('{');
        final K myKeys[] = this.keys;
        int pos = arraySize;
        boolean first = true;
        if (containsNullKey) {
            // The null-key entry (stored in the extra slot) is printed first.
            sb.append(myKeys[pos] == this ? "(this map)" : myKeys[pos]);
            sb.append('=');
            sb.append(values[pos] == this ? "(this map)" : values[pos]);
            first = false;
        }
        --pos;
        for (; pos >= 0; pos--) {
            if (myKeys[pos] != null) {
                if (first) {
                    first = false;
                } else {
                    sb.append(", ");
                }
                sb.append(myKeys[pos] == this ? "(this map)" : myKeys[pos]);
                sb.append('=');
                sb.append(values[pos] == this ? "(this map)" : values[pos]);
            }
        }
        sb.append('}');
        return sb.toString();
    }
+
    /**
     * Hashing utilities adapted from <a href="http://fastutil.di.unimi.it/">fastutil</a>'s
     * {@code it.unimi.dsi.fastutil.HashCommon}.
     */
    private static class HashCommon {
        /** Utility holder: not instantiable. */
        private HashCommon() {}

        /** 2<sup>32</sup> · φ, φ = (√5 − 1)/2. */
        private static final int INT_PHI = 0x9E3779B9;

        /** The reciprocal of {@link #INT_PHI} modulo 2<sup>32</sup>. */
        private static final int INV_INT_PHI = 0x144cbc89;

        /** Avalanches the bits of an integer by applying the finalisation step of MurmurHash3.
         *
         * <p>This method implements the finalisation step of Austin Appleby's
         * <a href="http://code.google.com/p/smhasher/">MurmurHash3</a>.
         * Its purpose is to avalanche the bits of the argument to within 0.25% bias.
         *
         * @param x an integer.
         * @return a hash value with good avalanching properties.
         */
        public static int murmurHash3(int x) {
            x ^= x >>> 16;
            x *= 0x85ebca6b;
            x ^= x >>> 13;
            x *= 0xc2b2ae35;
            x ^= x >>> 16;
            return x;
        }

        /**
         * Quickly mixes the bits of an integer.
         *
         * <p>This method mixes the bits of the argument by multiplying by the golden ratio and
         * xorshifting the result. It is borrowed from <a href="https://github.com/OpenHFT/Koloboke">Koloboke</a>, and
         * it has slightly worse behaviour than {@link #murmurHash3(int)} (in open-addressing hash tables the average
         * number of probes is slightly larger), but it's much faster.
         *
         * @param x an integer.
         * @return a hash value obtained by mixing the bits of {@code x}.
         * @see #invMix(int)
         */
        public static int mix(final int x) {
            final int h = x * INT_PHI;
            return h ^ (h >>> 16);
        }

        /** The inverse of {@link #mix(int)}. This method is mainly useful to create unit tests.
         *
         * @param x an integer.
         * @return a value that passed through {@link #mix(int)} would give {@code x}.
         */
        public static int invMix(final int x) {
            return (x ^ x >>> 16) * INV_INT_PHI;
        }

        /** Return the least power of two greater than or equal to the specified value.
         *
         * <p>Note that this function will return 1 when the argument is 0.
         *
         * @param x an integer smaller than or equal to 2<sup>30</sup>.
         * @return the least power of two greater than or equal to the specified value.
         */
        public static int nextPowerOfTwo(int x) {
            if (x == 0) {
                return 1;
            }
            // Smear the highest set bit into all lower positions, then add one.
            x--;
            x |= x >> 1;
            x |= x >> 2;
            x |= x >> 4;
            x |= x >> 8;
            return (x | x >> 16) + 1;
        }

        /** Return the least power of two greater than or equal to the specified value.
         *
         * <p>Note that this function will return 1 when the argument is 0.
         *
         * @param x a long integer smaller than or equal to 2<sup>62</sup>.
         * @return the least power of two greater than or equal to the specified value.
         */
        public static long nextPowerOfTwo(long x) {
            if (x == 0) {
                return 1;
            }
            x--;
            x |= x >> 1;
            x |= x >> 2;
            x |= x >> 4;
            x |= x >> 8;
            x |= x >> 16;
            return (x | x >> 32) + 1;
        }


        /** Returns the maximum number of entries that can be filled before rehashing.
         *
         * @param n the size of the backing array.
         * @param f the load factor.
         * @return the maximum number of entries before rehashing.
         */
        public static int maxFill(final int n, final float f) {
            /* We must guarantee that there is always at least
             * one free entry (even with pathological load factors). */
            return Math.min((int) Math.ceil(n * f), n - 1);
        }

        /**
         * Returns the least power of two smaller than or equal to 2<sup>30</sup> and larger than or equal to
         * <code>Math.ceil( expected / f )</code>.
         *
         * @param expected the expected number of elements in a hash table.
         * @param f the load factor.
         * @return the minimum possible size for a backing array.
         * @throws IllegalArgumentException if the necessary size is larger than 2<sup>30</sup>.
         */
        public static int arraySize(final int expected, final float f) {
            final long result = Math.max(2, nextPowerOfTwo((long) Math.ceil(expected / f)));
            if (result > (1 << 30)) {
                throw new IllegalArgumentException("Too large (" + expected +
                        " expected elements with load factor " + f + ")");
            }
            return (int) result;
        }
    }
+}
http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/c846a757/log4j-core/src/test/java/org/apache/logging/log4j/core/impl/OpenHashMapContextDataTest.java
----------------------------------------------------------------------
diff --git a/log4j-core/src/test/java/org/apache/logging/log4j/core/impl/OpenHashMapContextDataTest.java b/log4j-core/src/test/java/org/apache/logging/log4j/core/impl/OpenHashMapContextDataTest.java
new file mode 100644
index 0000000..dd76cc3
--- /dev/null
+++ b/log4j-core/src/test/java/org/apache/logging/log4j/core/impl/OpenHashMapContextDataTest.java
@@ -0,0 +1,506 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache license, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license for the specific language governing permissions and
+ * limitations under the license.
+ */
+package org.apache.logging.log4j.core.impl;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.logging.log4j.core.ContextData;
+import org.apache.logging.log4j.core.util.BiConsumer;
+import org.apache.logging.log4j.core.util.TriConsumer;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+/**
+ * Tests the OpenHashMapContextData class.
+ */
+public class OpenHashMapContextDataTest {
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testConstructorDisallowsNegativeCapacity() throws Exception {
+ new OpenHashMapContextData(-1);
+ }
+
+ @Test
+ public void testConstructorAllowsZeroCapacity() throws Exception {
+ OpenHashMapContextData data = new OpenHashMapContextData(0);
+ assertEquals(2, data.arraySize);
+ }
+
+ @Test(expected = NullPointerException.class)
+ @SuppressWarnings("unchecked")
+ public void testConstructorDisallowsNullMap() throws Exception {
+ assertEquals(0, new OpenHashMapContextData((Map) null).size());
+ }
+
+ @Test(expected = NullPointerException.class)
+ @SuppressWarnings("unchecked")
+ public void testConstructorDisallowsNullContextData() throws Exception {
+ assertEquals(0, new OpenHashMapContextData((ContextData) null).size());
+ }
+
+ @Test
+ public void testToString() {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+ assertEquals("{B=Bvalue, a=avalue, 3=3value}", original.toString());
+ }
+
+ @Test
+ public void testSerialization() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+
+ final byte[] binary = serialize(original);
+ final OpenHashMapContextData copy = deserialize(binary);
+ assertEquals(original, copy);
+ }
+
+ private byte[] serialize(final OpenHashMapContextData data) throws IOException {
+ final ByteArrayOutputStream arr = new ByteArrayOutputStream();
+ final ObjectOutputStream out = new ObjectOutputStream(arr);
+ out.writeObject(data);
+ return arr.toByteArray();
+ }
+
+ private OpenHashMapContextData deserialize(final byte[] binary) throws IOException, ClassNotFoundException {
+ final ByteArrayInputStream inArr = new ByteArrayInputStream(binary);
+ final ObjectInputStream in = new ObjectInputStream(inArr);
+ final OpenHashMapContextData result = (OpenHashMapContextData) in.readObject();
+ return result;
+ }
+
+ @Test
+ public void testPutAll() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+
+ final OpenHashMapContextData other = new OpenHashMapContextData();
+ other.putAll(original);
+ assertEquals(original, other);
+
+ other.putValue("3", "otherValue");
+ assertNotEquals(original, other);
+
+ other.putValue("3", null);
+ assertNotEquals(original, other);
+
+ other.putValue("3", "3value");
+ assertEquals(original, other);
+ }
+
+ @Test
+ public void testEquals() {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+ assertEquals(original, original); // equal to itself
+
+ final OpenHashMapContextData other = new OpenHashMapContextData();
+ other.putValue("a", "avalue");
+ assertNotEquals(original, other);
+
+ other.putValue("B", "Bvalue");
+ assertNotEquals(original, other);
+
+ other.putValue("3", "3value");
+ assertEquals(original, other);
+
+ other.putValue("3", "otherValue");
+ assertNotEquals(original, other);
+
+ other.putValue("3", null);
+ assertNotEquals(original, other);
+
+ other.putValue("3", "3value");
+ assertEquals(original, other);
+ }
+
+ @Test
+ public void testAsMap() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+
+ final Map<String, Object> expected = new HashMap<>();
+ expected.put("a", "avalue");
+ expected.put("B", "Bvalue");
+ expected.put("3", "3value");
+
+ assertEquals(expected, original.asMap());
+ }
+
+ @Test
+ public void testGetCopyDelegatesToAsMap() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ assertEquals(original.getCopy(), original.asMap());
+
+ original.putValue("B", "Bvalue");
+ assertEquals(original.getCopy(), original.asMap());
+
+ original.putValue("3", "3value");
+ assertEquals(original.getCopy(), original.asMap());
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testGetImmutableMapOrNull() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ assertEquals(original.getImmutableMapOrNull(), original.asMap());
+
+ original.putValue("B", "Bvalue");
+ assertEquals(original.getImmutableMapOrNull(), original.asMap());
+
+ original.putValue("3", "3value");
+ assertEquals(original.getImmutableMapOrNull(), original.asMap());
+
+ try {
+ original.getImmutableMapOrNull().put("abc", "xyz");
+ fail("Expected map to be immutable");
+ } catch (final UnsupportedOperationException ok) {
+ //ok
+ }
+ }
+
+ @Test
+ public void testPutInsertsInAlphabeticOrder() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.put("a", "avalue");
+ original.put("B", "Bvalue");
+ original.put("3", "3value");
+ original.put("c", "cvalue");
+ original.put("d", "dvalue");
+
+ assertEquals("avalue", original.getValue("a"));
+ assertEquals("Bvalue", original.getValue("B"));
+ assertEquals("3value", original.getValue("3"));
+ assertEquals("cvalue", original.getValue("c"));
+ assertEquals("dvalue", original.getValue("d"));
+ }
+
+ @Test
+ public void testPutValueInsertsInAlphabeticOrder() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+ original.putValue("c", "cvalue");
+ original.putValue("d", "dvalue");
+
+ assertEquals("avalue", original.getValue("a"));
+ assertEquals("Bvalue", original.getValue("B"));
+ assertEquals("3value", original.getValue("3"));
+ assertEquals("cvalue", original.getValue("c"));
+ assertEquals("dvalue", original.getValue("d"));
+ }
+
+ @Test
+ public void testNullKeysAllowed() {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+ original.putValue("c", "cvalue");
+ original.putValue("d", "dvalue");
+ assertEquals(5, original.size());
+ assertEquals("{B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
+
+ original.putValue(null, "nullvalue");
+ assertEquals(6, original.size());
+ assertEquals("{null=nullvalue, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
+
+ original.putValue(null, "otherNullvalue");
+ assertEquals("{null=otherNullvalue, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
+ assertEquals(6, original.size());
+
+ original.putValue(null, "nullvalue");
+ assertEquals(6, original.size());
+ assertEquals("{null=nullvalue, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
+
+ original.putValue(null, "abc");
+ assertEquals(6, original.size());
+ assertEquals("{null=abc, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
+ }
+
+ @Test
+ public void testNullKeysCopiedToAsMap() {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+ original.putValue("c", "cvalue");
+ original.putValue("d", "dvalue");
+ assertEquals(5, original.size());
+
+ HashMap<String, String> expected = new HashMap<>();
+ expected.put("a", "avalue");
+ expected.put("B", "Bvalue");
+ expected.put("3", "3value");
+ expected.put("c", "cvalue");
+ expected.put("d", "dvalue");
+ assertEquals("initial", expected, original.asMap());
+
+ original.putValue(null, "nullvalue");
+ expected.put(null, "nullvalue");
+ assertEquals(6, original.size());
+ assertEquals("with null key", expected, original.asMap());
+
+ original.putValue(null, "otherNullvalue");
+ expected.put(null, "otherNullvalue");
+ assertEquals(6, original.size());
+ assertEquals("with null key value2", expected, original.asMap());
+
+ original.putValue(null, "nullvalue");
+ expected.put(null, "nullvalue");
+ assertEquals(6, original.size());
+ assertEquals("with null key value1 again", expected, original.asMap());
+
+ original.putValue(null, "abc");
+ expected.put(null, "abc");
+ assertEquals(6, original.size());
+ assertEquals("with null key value3", expected, original.asMap());
+ }
+
+ @Test
+ public void testRemove() {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ assertEquals(1, original.size());
+ assertEquals("avalue", original.getValue("a"));
+
+ original.remove("a");
+ assertEquals(0, original.size());
+ assertNull("no a val", original.getValue("a"));
+
+ original.remove("B");
+ assertEquals(0, original.size());
+ assertNull("no B val", original.getValue("B"));
+ }
+
+ @Test
+ public void testNullValuesArePreserved() {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ assertEquals(1, original.size());
+ assertEquals("avalue", original.getValue("a"));
+
+ original.putValue("a", null);
+ assertEquals(1, original.size());
+ assertNull("no a val", original.getValue("a"));
+
+ original.putValue("B", null);
+ assertEquals(2, original.size());
+ assertNull("no B val", original.getValue("B"));
+ }
+
+ @Test
+ public void testGet() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.put("a", "avalue");
+ original.put("B", "Bvalue");
+ original.put("3", "3value");
+
+ assertEquals("avalue", original.get("a"));
+ assertEquals("Bvalue", original.get("B"));
+ assertEquals("3value", original.get("3"));
+
+ original.putValue("0", "0value");
+ assertEquals("0value", original.get("0"));
+ assertEquals("3value", original.get("3"));
+ assertEquals("Bvalue", original.get("B"));
+ assertEquals("avalue", original.get("a"));
+ }
+
+ @Test
+ public void testGetValue_GetValueAt() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+
+ assertEquals("avalue", original.getValue("a"));
+ assertEquals("Bvalue", original.getValue("B"));
+ assertEquals("3value", original.getValue("3"));
+ original.putValue("0", "0value");
+ assertEquals("0value", original.getValue("0"));
+ assertEquals("3value", original.getValue("3"));
+ assertEquals("Bvalue", original.getValue("B"));
+ assertEquals("avalue", original.getValue("a"));
+ }
+
+ @Test
+ public void testClear() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ original.putValue("a", "avalue");
+ original.putValue("B", "Bvalue");
+ original.putValue("3", "3value");
+ assertEquals(3, original.size());
+
+ original.clear();
+ assertEquals(0, original.size());
+
+ // ensure slots in the values array are nulled out
+ Field f = OpenHashMapContextData.class.getDeclaredField("values");
+ f.setAccessible(true);
+ Object[] values = (Object[]) f.get(original);
+ for (int i = 0; i < values.length; i++) {
+ assertNull(values[i]);
+ }
+ }
+
+ @Test
+ public void testContainsKey() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ assertFalse("a", original.containsKey("a"));
+ assertFalse("B", original.containsKey("B"));
+ assertFalse("3", original.containsKey("3"));
+ assertFalse("A", original.containsKey("A"));
+
+ original.putValue("a", "avalue");
+ assertTrue("a", original.containsKey("a"));
+ assertFalse("B", original.containsKey("B"));
+ assertFalse("3", original.containsKey("3"));
+ assertFalse("A", original.containsKey("A"));
+
+ original.putValue("B", "Bvalue");
+ assertTrue("a", original.containsKey("a"));
+ assertTrue("B", original.containsKey("B"));
+ assertFalse("3", original.containsKey("3"));
+ assertFalse("A", original.containsKey("A"));
+
+ original.putValue("3", "3value");
+ assertTrue("a", original.containsKey("a"));
+ assertTrue("B", original.containsKey("B"));
+ assertTrue("3", original.containsKey("3"));
+ assertFalse("A", original.containsKey("A"));
+
+ original.putValue("A", "AAA");
+ assertTrue("a", original.containsKey("a"));
+ assertTrue("B", original.containsKey("B"));
+ assertTrue("3", original.containsKey("3"));
+ assertTrue("A", original.containsKey("A"));
+ }
+
+ @Test
+ public void testSizeAndIsEmpty() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ assertEquals(0, original.size());
+ assertTrue("initial", original.isEmpty());
+
+ original.putValue("a", "avalue");
+ assertEquals(1, original.size());
+ assertFalse("size=" + original.size(), original.isEmpty());
+
+ original.putValue("B", "Bvalue");
+ assertEquals(2, original.size());
+ assertFalse("size=" + original.size(), original.isEmpty());
+
+ original.putValue("3", "3value");
+ assertEquals(3, original.size());
+ assertFalse("size=" + original.size(), original.isEmpty());
+
+ original.remove("B");
+ assertEquals(2, original.size());
+ assertFalse("size=" + original.size(), original.isEmpty());
+
+ original.remove("3");
+ assertEquals(1, original.size());
+ assertFalse("size=" + original.size(), original.isEmpty());
+
+ original.remove("a");
+ assertEquals(0, original.size());
+ assertTrue("size=" + original.size(), original.isEmpty());
+ }
+
+ @Test
+ public void testForEachBiConsumer() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ final List<String> keys = new ArrayList<>(Arrays.asList("a", "B", "3", null));
+ final List<String> values = new ArrayList<>(Arrays.asList("aValue", "Bvalue", "3Value", "nullValue"));
+ for (int i = 0; i < keys.size(); i++) {
+ original.put(keys.get(i), values.get(i));
+ }
+
+ original.forEach(new BiConsumer<String, String>() {
+ int count = 0;
+ @Override
+ public void accept(final String key, final String value) {
+ assertTrue("key exists", keys.remove(key));
+ assertTrue("val exists", values.remove(value));
+ assertEquals("val", value, original.getValue(key));
+ count++;
+ assertTrue("count should not exceed size but was " + count, count <= original.size());
+ }
+ });
+ }
+
+ static class State<K, V> {
+ OpenHashMapContextData data;
+ int count;
+ List<K> keys;
+ List<V> values;
+ }
+ private static TriConsumer<String, String, State> COUNTER = new TriConsumer<String, String, State>() {
+ @Override
+ public void accept(final String key, final String value, final State state) {
+ assertTrue("key exists", state.keys.remove(key));
+ assertTrue("val exists", state.values.remove(value));
+ assertEquals("val", value, state.data.getValue(key));
+ state.count++;
+ assertTrue("count should not exceed size but was " + state.count,
+ state.count <= state.data.size());
+ }
+ };
+
+ @Test
+ public void testForEachTriConsumer() throws Exception {
+ final OpenHashMapContextData original = new OpenHashMapContextData();
+ final List<String> keys = Arrays.asList("a", "B", "3", null);
+ final List<String> values = Arrays.asList("aValue", "Bvalue", "3Value", "nullValue");
+ for (int i = 0; i < keys.size(); i++) {
+ original.put(keys.get(i), values.get(i));
+ }
+ final State state = new State();
+ state.data = original;
+ state.keys = new ArrayList(keys);
+ state.values = new ArrayList(values);
+
+ original.forEach(COUNTER, state);
+ assertEquals(state.count, original.size());
+ assertTrue("all keys", state.keys.isEmpty());
+ assertTrue("all values", state.values.isEmpty());
+ }
+}
\ No newline at end of file