You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@logging.apache.org by rp...@apache.org on 2016/08/27 08:04:34 UTC

[01/10] logging-log4j2 git commit: LOG4J2-1349 added CopyOnWrite marker interface

Repository: logging-log4j2
Updated Branches:
  refs/heads/LOG4J2-1349-gcfree-threadcontext 3832ad9b3 -> 5edcfe823


LOG4J2-1349 added CopyOnWrite marker interface


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/736c8a55
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/736c8a55
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/736c8a55

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: 736c8a557b3949f670980d1034738c1de7e55e4e
Parents: 3832ad9
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 16:54:37 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 16:54:37 2016 +0900

----------------------------------------------------------------------
 .../main/java/org/apache/logging/log4j/spi/CopyOnWrite.java | 9 +++++++++
 1 file changed, 9 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/736c8a55/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWrite.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWrite.java b/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWrite.java
new file mode 100644
index 0000000..dfcacc6
--- /dev/null
+++ b/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWrite.java
@@ -0,0 +1,9 @@
+package org.apache.logging.log4j.spi;
+
+/**
+ * Marker interface indicating that the implementing class is a copy-on-write data structure.
+ *
+ * @since 2.7
+ */
+public interface CopyOnWrite {
+}


[05/10] logging-log4j2 git commit: LOG4J2-1349 moved to log4j-perf module

Posted by rp...@apache.org.
LOG4J2-1349 moved to log4j-perf module


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/77897f60
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/77897f60
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/77897f60

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: 77897f60a0653d526c96688c3dad39d64efffecf
Parents: 78e8afd
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 16:57:51 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 16:57:51 2016 +0900

----------------------------------------------------------------------
 .../log4j/spi/OpenHashMapContextData.java       | 883 ------------------
 .../log4j/perf/nogc/OpenHashMapContextData.java | 886 +++++++++++++++++++
 2 files changed, 886 insertions(+), 883 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/77897f60/log4j-api/src/main/java/org/apache/logging/log4j/spi/OpenHashMapContextData.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/main/java/org/apache/logging/log4j/spi/OpenHashMapContextData.java b/log4j-api/src/main/java/org/apache/logging/log4j/spi/OpenHashMapContextData.java
deleted file mode 100644
index 72f5622..0000000
--- a/log4j-api/src/main/java/org/apache/logging/log4j/spi/OpenHashMapContextData.java
+++ /dev/null
@@ -1,883 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache license, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the license for the specific language governing permissions and
- * limitations under the license.
- */
-package org.apache.logging.log4j.spi;
-
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.ConcurrentModificationException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-import org.apache.logging.log4j.util.BiConsumer;
-import org.apache.logging.log4j.util.TriConsumer;
-
-/**
- * Open hash map-based implementation of the {@code ContextData} interface.
- * Implementation based on <a href="http://fastutil.di.unimi.it/">fastutil</a>'s
- * <a href="http://fastutil.di.unimi.it/docs/it/unimi/dsi/fastutil/objects/Object2ObjectOpenHashMap.html">Object2ObjectOpenHashMap</a>.
- * <p>
- * A type-specific hash map with a fast, small-footprint implementation.
- *
- * <P>
- * Instances of this class use a hash table to represent a map. The table is
- * filled up to a specified <em>load factor</em>, and then doubled in size to
- * accommodate new entries. If the table is emptied below <em>one fourth</em> of
- * the load factor, it is halved in size. However, halving is not performed when
- * deleting entries from an iterator, as it would interfere with the iteration
- * process.
- *
- * <p>
- * Note that {@link #clear()} does not modify the hash table size. Rather, the
- * {@link #trim(int)} method lets you control the size of
- * the table; this is particularly useful if you reuse instances of this class.
- * <p>
- * <ul>
- *   <li>Garbage-free iteration over key-value pairs with {@code BiConsumer} and {@code TriConsumer}.</li>
- *   <li>Fast copy. If the ThreadContextMap is also an instance of {@code OpenHashMapContextData},
- *     the full thread context data can be transferred with two array copies and five field updates.</li>
- * </ul>
- *
- * @since 2.7
- */
-public class OpenHashMapContextData<K, V> implements MutableContextData, ThreadContextMap {
-    /** The initial default size of a hash table. */
-    public static final int DEFAULT_INITIAL_SIZE = 16;
-
-    /** The default load factor of a hash table. */
-    public static final float DEFAULT_LOAD_FACTOR = .75f;
-
-    private static final long serialVersionUID = -1486744623338827187L;
-
-    /** The array of keys. */
-    protected transient K[] keys;
-    /** The array of values. */
-    protected transient V[] values;
-    /** The mask for wrapping a position counter. */
-    protected transient int mask;
-    /** Whether this set contains the key zero. */
-    protected transient boolean containsNullKey;
-    /** The current table size. */
-    protected transient int arraySize;
-    /**
-     * Threshold after which we rehash. It must be the table size times {@link #loadFactor}.
-     */
-    protected transient int maxFill;
-    /** Number of entries in the set (including the key zero, if present). */
-    protected int size;
-    /** The acceptable load factor. */
-    protected final float loadFactor;
-
-    private V defRetValue = null;
-
-    /**
-     * Creates a new hash map with initial expected
-     * {@link #DEFAULT_INITIAL_SIZE} entries and
-     * {@link #DEFAULT_LOAD_FACTOR} as load factor.
-     */
-    public OpenHashMapContextData() {
-        this(DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR);
-    }
-    /**
-     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load factor.
-     *
-     * @param expected
-     *            the expected number of elements in the hash map.
-     */
-    public OpenHashMapContextData(final int expected) {
-        this(expected, DEFAULT_LOAD_FACTOR);
-    }
-    /**
-     * Creates a new hash map.
-     *
-     * <p>
-     * The actual table size will be the least power of two greater than
-     * <code>expected</code>/<code>f</code>.
-     *
-     * @param expected
-     *            the expected number of elements in the hash set.
-     * @param f
-     *            the load factor.
-     */
-    @SuppressWarnings("unchecked")
-    public OpenHashMapContextData(final int expected, final float f) {
-        if (f <= 0 || f > 1) {
-            throw new IllegalArgumentException(
-                    "Load factor must be greater than 0 and smaller than or equal to 1");
-        }
-        if (expected < 0){
-            throw new IllegalArgumentException(
-                    "The expected number of elements must be nonnegative");
-        }
-        this.loadFactor = f;
-        arraySize = HashCommon.arraySize(expected, f);
-        mask = arraySize - 1;
-        maxFill = HashCommon.maxFill(arraySize, f);
-        keys = (K[]) new Object[arraySize + 1];
-        values = (V[]) new Object[arraySize + 1];
-    }
-    /**
-     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load
-     * factor copying a given one.
-     *
-     * @param map
-     *            a {@link Map} to be copied into the new hash map.
-     */
-    public OpenHashMapContextData(final Map<? extends K, ? extends V> map) {
-        this(map, DEFAULT_LOAD_FACTOR);
-    }
-    /**
-     * Creates a new hash map copying a given one.
-     *
-     * @param map
-     *            a {@link Map} to be copied into the new hash map.
-     * @param f
-     *            the load factor.
-     */
-    public OpenHashMapContextData(final Map<? extends K, ? extends V> map, final float f) {
-        this(map.size(), f);
-        putAll(map);
-    }
-
-    /**
-     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load
-     * factor copying a given type-specific one.
-     *
-     * @param contextData
-     *            a type-specific map to be copied into the new hash map.
-     */
-    public OpenHashMapContextData(final ContextData contextData) {
-        this(contextData, DEFAULT_LOAD_FACTOR);
-    }
-    /**
-     * Creates a new hash map copying a given type-specific one.
-     *
-     * @param contextData
-     *            a type-specific map to be copied into the new hash map.
-     * @param f
-     *            the load factor.
-     */
-    public OpenHashMapContextData(final ContextData contextData, final float f) {
-        this(contextData.size(), f);
-        if (contextData instanceof OpenHashMapContextData) {
-            initFrom0((OpenHashMapContextData) contextData);
-        } else {
-            contextData.forEach(PUT_ALL, this);
-        }
-    }
-    private static final TriConsumer<String, Object, MutableContextData> PUT_ALL =
-            new TriConsumer<String, Object, MutableContextData>() {
-        @Override
-        public void accept(final String key, final Object value, final MutableContextData contextData) {
-            contextData.putValue(key, value);
-        }
-    };
-
-    @SuppressWarnings("unchecked")
-    private void initFrom0(final OpenHashMapContextData other) {
-        // this.loadFactor = other.loadFactor; // final field
-        this.arraySize = other.arraySize;
-        this.size = other.size;
-        this.containsNullKey = other.containsNullKey;
-        this.mask = other.mask;
-        this.maxFill = other.maxFill;
-        keys = (K[]) Arrays.copyOf(other.keys, arraySize + 1);
-        values = (V[]) Arrays.copyOf(other.values, arraySize + 1);
-    }
-
-    private int realSize() {
-        return containsNullKey ? size - 1 : size;
-    }
-
-    private void ensureCapacity(final int capacity) {
-        final int needed = HashCommon.arraySize(capacity, loadFactor);
-        if (needed > arraySize) {
-            rehash(needed);
-        }
-    }
-
-    private void tryCapacity(final long capacity) {
-        final int needed = (int) Math.min(
-                1 << 30, Math.max(2, HashCommon.nextPowerOfTwo((int) Math.ceil(capacity / loadFactor))));
-        if (needed > arraySize) {
-            rehash(needed);
-        }
-    }
-
-    @Override
-    public Map<String, String> asMap() {
-        final Map<String, String> result = new HashMap<>(size);
-        forEach(COPY_INTO_MAP, result);
-        return result;
-    }
-
-    private static final TriConsumer<String, Object, Map<String, String>> COPY_INTO_MAP =
-            new TriConsumer<String, Object, Map<String, String>>() {
-        @Override
-        public void accept(final String k, final Object v, final Map<String, String> map) {
-            map.put(k, v == null ? null : v.toString());
-        }
-    };
-
-    /*
-     * Removes all elements from this map.
-     *
-     * <P>To increase object reuse, this method does not change the table size.
-     * If you want to reduce the table size, you must use {@link #trim()}.
-     */
-    @Override
-    public void clear() {
-        if (size == 0) {
-            return;
-        }
-        size = 0;
-        containsNullKey = false;
-        Arrays.fill(keys, (null));
-        Arrays.fill(values, null);
-    }
-
-    @Override
-    public boolean containsKey(final String key) {
-        return containsObjectKey((Object) key);
-    }
-
-    @SuppressWarnings("unchecked")
-    private boolean containsObjectKey(final Object k) {
-        if (k == null) {
-            return containsNullKey;
-        }
-        K curr;
-        final K[] key = this.keys;
-        int pos;
-        // The starting point.
-        if ((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null) {
-            return false;
-        }
-        if (k.equals(curr)) {
-            return true;
-        }
-        // There's always an unused entry.
-        while (true) {
-            if ((curr = key[pos = (pos + 1) & mask]) == null) {
-                return false;
-            }
-            if (k.equals(curr)) {
-                return true;
-            }
-        }
-    }
-
-    public boolean equals(final Object obj) {
-        if (obj == this) {
-            return true;
-        }
-        if (!(obj instanceof ContextData)) {
-            return false;
-        }
-        final ContextData other = (ContextData) obj;
-        if (other.size() != size()) {
-            return false;
-        }
-        int pos = arraySize;
-        if (containsNullKey) {
-            if (!Objects.equals(getObjectValue(null), other.getValue(null))) {
-                return false;
-            }
-        }
-        --pos;
-        final K myKeys[] = this.keys;
-        for (; pos >= 0; pos--) {
-            K k;
-            if ((k = myKeys[pos]) != null) {
-                if (!Objects.equals(values[pos], other.getValue((String) k))) {
-                    return false;
-                }
-            }
-        }
-        return true;
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public <VAL> void forEach(final BiConsumer<String, ? super VAL> action) {
-        final int startSize = size;
-        final K myKeys[] = this.keys;
-        int pos = arraySize;
-        if (containsNullKey) {
-            action.accept((String) myKeys[pos], (VAL) values[pos]);
-            if (size != startSize) {
-                throw new ConcurrentModificationException();
-            }
-        }
-        --pos;
-        for (; pos >= 0; pos--) {
-            if (myKeys[pos] != null) {
-                action.accept((String) myKeys[pos], (VAL) values[pos]);
-                if (size != startSize) {
-                    throw new ConcurrentModificationException();
-                }
-            }
-        }
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public <VAL, STATE> void forEach(final TriConsumer<String, ? super VAL, STATE> action, final STATE state) {
-        final int startSize = size;
-        final K myKeys[] = this.keys;
-        int pos = arraySize;
-        if (containsNullKey) {
-            action.accept((String) myKeys[pos], (VAL) values[pos], state);
-            if (size != startSize) {
-                throw new ConcurrentModificationException();
-            }
-        }
-        --pos;
-        for (; pos >= 0; pos--) {
-            if (myKeys[pos] != null) {
-                action.accept((String) myKeys[pos], (VAL) values[pos], state);
-                if (size != startSize) {
-                    throw new ConcurrentModificationException();
-                }
-            }
-        }
-    }
-
-    @Override
-    public String get(final String key) {
-        return (String) getObjectValue(key);
-    }
-
-    @SuppressWarnings("unchecked")
-    private V getObjectValue(final Object k) {
-        if (k == null) {
-            return containsNullKey ? values[arraySize] : defRetValue;
-        }
-        K curr;
-        final K[] key = this.keys;
-        int pos;
-        // The starting point.
-        if ((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null) {
-            return defRetValue;
-        }
-        if (k.equals(curr)) {
-            return values[pos];
-        }
-        // There's always an unused entry.
-        while (true) {
-            if (((curr = key[pos = (pos + 1) & mask]) == null)) {
-                return defRetValue;
-            }
-            if (k.equals(curr)) {
-                return values[pos];
-            }
-        }
-    }
-
-    @Override
-    public Map<String, String> getCopy() {
-        return asMap();
-    }
-
-    @Override
-    public Map<String, String> getImmutableMapOrNull() {
-        return isEmpty() ? null : Collections.unmodifiableMap(asMap());
-    }
-
-    @Override
-    public <VAL> VAL getValue(final String key) {
-        return (VAL) getObjectValue((Object) key);
-    }
-
-    @Override
-    public boolean isEmpty() {
-        return size == 0;
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public void put(final String key, final String value) {
-        putObjectValue((K) key, (V) value);
-    }
-
-    private int insert(final K k, final V v) {
-        int pos;
-        if (k == null) {
-            if (containsNullKey) {
-                return arraySize;
-            }
-            containsNullKey = true;
-            pos = arraySize;
-        } else {
-            K curr;
-            final K[] key = this.keys;
-            // The starting point.
-            if (!((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null)) {
-                if (curr.equals(k)) {
-                    return pos;
-                }
-                while (!((curr = key[pos = (pos + 1) & mask]) == null)) {
-                    if (curr.equals(k)) {
-                        return pos;
-                    }
-                }
-            }
-        }
-        keys[pos] = k;
-        values[pos] = v;
-        if (size++ >= maxFill) {
-            rehash(HashCommon.arraySize(size + 1, loadFactor));
-        }
-        return -1;
-    }
-
-    @Override
-    public void putAll(final ContextData source) {
-        if (size() == 0 && source instanceof OpenHashMapContextData) {
-            initFrom0((OpenHashMapContextData) source);
-        } else if (source != null) {
-            source.forEach(PUT_ALL, this);
-        }
-    }
-
-    /** {@inheritDoc} */
-    public void putAll(Map<? extends K, ? extends V> map) {
-        if (loadFactor <= .5) {
-            // The resulting map will be sized for m.size() elements
-            ensureCapacity(map.size());
-        } else {
-            // The resulting map will be tentatively sized for size() +  m.size() elements
-            tryCapacity(size() + map.size());
-        }
-        for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
-            putObjectValue(entry.getKey(), entry.getValue());
-        }
-    }
-
-    private V putObjectValue(final K k, final V v) {
-        final int pos = insert(k, v);
-        if (pos < 0) {
-            return defRetValue;
-        }
-        final V oldValue = values[pos];
-        values[pos] = v;
-        return oldValue;
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public void putValue(final String key, final Object value) {
-        putObjectValue((K) key, (V) value);
-    }
-
-    @Override
-    public void remove(final String key) {
-        removeObjectKey((Object) key);
-    }
-
-    @SuppressWarnings("unchecked")
-    private V removeObjectKey(final Object k) {
-        if (k == null) {
-            if (containsNullKey) {
-                return removeNullEntry();
-            }
-            return defRetValue;
-        }
-        final K[] key = this.keys;
-        int pos = HashCommon.mix(k.hashCode()) & mask;
-        K curr = key[pos & mask];
-        // The starting point.
-        if (curr == null) {
-            return defRetValue;
-        }
-        if (k.equals(curr)) {
-            return removeEntry(pos);
-        }
-        while (true) {
-            if ((curr = key[pos = (pos + 1) & mask]) == null) {
-                return defRetValue;
-            }
-            if (k.equals(curr)) {
-                return removeEntry(pos);
-            }
-        }
-    }
-    private V removeEntry(final int pos) {
-        final V oldValue = values[pos];
-        values[pos] = null;
-        size--;
-        shiftKeys(pos);
-        if (size < maxFill / 4 && arraySize > DEFAULT_INITIAL_SIZE) {
-            rehash(arraySize / 2);
-        }
-        return oldValue;
-    }
-    private V removeNullEntry() {
-        containsNullKey = false;
-        keys[arraySize] = null;
-        final V oldValue = values[arraySize];
-        values[arraySize] = null;
-        size--;
-        if (size < maxFill / 4 && arraySize > DEFAULT_INITIAL_SIZE) {
-            rehash(arraySize / 2);
-        }
-        return oldValue;
-    }
-    /**
-     * Shifts left entries with the specified hash code, starting at the
-     * specified position, and empties the resulting free entry.
-     *
-     * @param pos
-     *            a starting position.
-     */
-    private void shiftKeys(int pos) {
-        // Shift entries with the same hash.
-        int last, slot;
-        K curr;
-        final K[] myKeys = this.keys;
-        for (;;) {
-            pos = ((last = pos) + 1) & mask;
-            for (;;) {
-                if (((curr = myKeys[pos]) == null)) {
-                    myKeys[last] = (null);
-                    values[last] = null;
-                    return;
-                }
-                slot = HashCommon.mix(curr.hashCode()) & mask;
-                if (last <= pos ? (last >= slot || slot > pos) : (last >= slot && slot > pos)) {
-                    break;
-                }
-                pos = (pos + 1) & mask;
-            }
-            myKeys[last] = curr;
-            values[last] = values[pos];
-        }
-    }
-
-    @Override
-    public int size() {
-        return size;
-    }
-
-    /**
-     * Rehashes this map if the table is too large.
-     *
-     * <P>
-     * Let <var>N</var> be the smallest table size that can hold
-     * <code>max(n,{@link #size()})</code> entries, still satisfying the load
-     * factor. If the current table size is smaller than or equal to
-     * <var>N</var>, this method does nothing. Otherwise, it rehashes this map
-     * in a table of size <var>N</var>.
-     *
-     * <P>
-     * This method is useful when reusing maps. {@linkplain #clear() Clearing a
-     * map} leaves the table size untouched. If you are reusing a map many times,
-     * you can call this method with a typical size to avoid keeping around a
-     * very large table just because of a few large transient maps.
-     *
-     * @param n
-     *            the threshold for the trimming.
-     * @return true if there was enough memory to trim the map.
-     */
-    public boolean trim(final int n) {
-        final int l = HashCommon.nextPowerOfTwo((int) Math.ceil(n / loadFactor));
-        if (l >= n || size > HashCommon.maxFill(l, loadFactor))
-            return true;
-        try {
-            rehash(l);
-        } catch (OutOfMemoryError cantDoIt) { // unusual to catch OOME but in this case appropriate
-            return false;
-        }
-        return true;
-    }
-    /**
-     * Rehashes the map.
-     *
-     * <P>
-     * This method implements the basic rehashing strategy, and may be overriden
-     * by subclasses implementing different rehashing strategies (e.g.,
-     * disk-based rehashing). However, you should not override this method
-     * unless you understand the internal workings of this class.
-     *
-     * @param newN
-     *            the new size
-     */
-    @SuppressWarnings("unchecked")
-    protected void rehash(final int newN) {
-        final K myKeys[] = this.keys;
-        final V myValues[] = this.values;
-        final int mask = newN - 1; // Note that this is used by the hashing
-        // macro
-        final K newKey[] = (K[]) new Object[newN + 1];
-        final V newValue[] = (V[]) new Object[newN + 1];
-        int i = arraySize, pos;
-        for (int j = realSize(); j-- != 0;) {
-            while (myKeys[--i] == null) {
-                // advance i until we find an existing key
-            }
-            if (newKey[pos = HashCommon.mix(myKeys[i].hashCode()) & mask] != null) { // rehash & check slot availability
-                while (newKey[pos = (pos + 1) & mask] != null) {
-                    // find available slot at (or immediately following) pos
-                }
-            }
-            newKey[pos] = myKeys[i];
-            newValue[pos] = myValues[i];
-        }
-        newValue[newN] = myValues[arraySize];
-        arraySize = newN;
-        this.mask = mask;
-        maxFill = HashCommon.maxFill(arraySize, loadFactor);
-        this.keys = newKey;
-        this.values = newValue;
-    }
-
-    /**
-     * Returns a hash code for this map.
-     *
-     * @return a hash code for this map.
-     */
-    public int hashCode() {
-        int result = 0;
-        for (int j = realSize(), i = 0, t = 0; j-- != 0;) {
-            while (keys[i] == null) {
-                i++;
-            }
-            if (this != keys[i]) {
-                t = keys[i].hashCode();
-            }
-            if (this != values[i]) {
-                t ^= (values[i] == null ? 0 : values[i].hashCode());
-            }
-            result += t;
-            i++;
-        }
-        // Zero / null keys have hash zero.
-        if (containsNullKey) {
-            result += (values[arraySize] == null ? 0 : values[arraySize].hashCode());
-        }
-        return result;
-    }
-
-    @SuppressWarnings("unchecked")
-    private void readObject(final ObjectInputStream s) throws IOException, ClassNotFoundException {
-        s.defaultReadObject();
-        arraySize = HashCommon.arraySize(size, loadFactor);
-        maxFill = HashCommon.maxFill(arraySize, loadFactor);
-        mask = arraySize - 1;
-        final K key[] = this.keys = (K[]) new Object[arraySize + 1];
-        final V value[] = this.values = (V[]) new Object[arraySize + 1];
-        K k;
-        V v;
-        for (int i = size, pos; i-- != 0;) {
-            k = (K) s.readObject();
-            v = (V) s.readObject();
-            if (k == null) {
-                pos = arraySize;
-                containsNullKey = true;
-            } else {
-                pos = HashCommon.mix(k.hashCode()) & mask;
-                while (key[pos] != null) {
-                    pos = (pos + 1) & mask;
-                }
-            }
-            key[pos] = k;
-            value[pos] = v;
-        }
-    }
-
-    private void writeObject(final ObjectOutputStream s) throws IOException {
-        s.defaultWriteObject();
-        try {
-            forEach(SERIALIZER, s);
-        } catch (final RuntimeException runex) {
-            if (runex.getCause() instanceof IOException) {
-                throw (IOException) runex.getCause();
-            }
-            throw runex;
-        }
-    }
-
-    private static final TriConsumer<String, Object, ObjectOutputStream> SERIALIZER =
-            new TriConsumer<String, Object, ObjectOutputStream>() {
-                @Override
-                public void accept(final String k, final Object v, final ObjectOutputStream objectOutputStream) {
-                    try {
-                        objectOutputStream.writeObject(k);
-                        objectOutputStream.writeObject(v);
-                    } catch (final IOException ioex) {
-                        throw new IllegalStateException(ioex);
-                    }
-                }
-            };
-
-    @Override
-    public String toString() {
-        final StringBuilder sb = new StringBuilder(256);
-        sb.append('{');
-        final K myKeys[] = this.keys;
-        int pos = arraySize;
-        boolean first = true;
-        if (containsNullKey) {
-            sb.append(myKeys[pos] == this ? "(this map)" : myKeys[pos]);
-            sb.append('=');
-            sb.append(values[pos] == this ? "(this map)" : values[pos]);
-            first = false;
-        }
-        --pos;
-        for (; pos >= 0; pos--) {
-            if (myKeys[pos] != null) {
-                if (first) {
-                    first = false;
-                } else {
-                    sb.append(", ");
-                }
-                sb.append(myKeys[pos] == this ? "(this map)" : myKeys[pos]);
-                sb.append('=');
-                sb.append(values[pos] == this ? "(this map)" : values[pos]);
-            }
-        }
-        sb.append('}');
-        return sb.toString();
-    }
-
-    private static class HashCommon {
-        private HashCommon() {}
-
-        /** 2<sup>32</sup> &middot; &phi;, &phi; = (&#x221A;5 &minus; 1)/2. */
-        private static final int INT_PHI = 0x9E3779B9;
-
-        /** The reciprocal of {@link #INT_PHI} modulo 2<sup>32</sup>. */
-        private static final int INV_INT_PHI = 0x144cbc89;
-
-        /** Avalanches the bits of an integer by applying the finalisation step of MurmurHash3.
-         *
-         * <p>This method implements the finalisation step of Austin Appleby's
-         * <a href="http://code.google.com/p/smhasher/">MurmurHash3</a>.
-         * Its purpose is to avalanche the bits of the argument to within 0.25% bias.
-         *
-         * @param x an integer.
-         * @return a hash value with good avalanching properties.
-         */
-        public static int murmurHash3(int x) {
-            x ^= x >>> 16;
-            x *= 0x85ebca6b;
-            x ^= x >>> 13;
-            x *= 0xc2b2ae35;
-            x ^= x >>> 16;
-            return x;
-        }
-
-        /**
-         * Quickly mixes the bits of an integer.
-         *
-         * <p>This method mixes the bits of the argument by multiplying by the golden ratio and
-         * xorshifting the result. It is borrowed from <a href="https://github.com/OpenHFT/Koloboke">Koloboke</a>, and
-         * it has slightly worse behaviour than {@link #murmurHash3(int)} (in open-addressing hash tables the average
-         * number of probes is slightly larger), but it's much faster.
-         *
-         * @param x an integer.
-         * @return a hash value obtained by mixing the bits of {@code x}.
-         * @see #invMix(int)
-         */
-        public static int mix(final int x) {
-            final int h = x * INT_PHI;
-            return h ^ (h >>> 16);
-        }
-
-        /** The inverse of {@link #mix(int)}. This method is mainly useful to create unit tests.
-         *
-         * @param x an integer.
-         * @return a value that passed through {@link #mix(int)} would give {@code x}.
-         */
-        public static int invMix(final int x) {
-            return (x ^ x >>> 16) * INV_INT_PHI;
-        }
-
-        /** Return the least power of two greater than or equal to the specified value.
-         *
-         * <p>Note that this function will return 1 when the argument is 0.
-         *
-         * @param x an integer smaller than or equal to 2<sup>30</sup>.
-         * @return the least power of two greater than or equal to the specified value.
-         */
-        public static int nextPowerOfTwo(int x) {
-            if (x == 0) {
-                return 1;
-            }
-            x--;
-            x |= x >> 1;
-            x |= x >> 2;
-            x |= x >> 4;
-            x |= x >> 8;
-            return (x | x >> 16) + 1;
-        }
-
-        /** Return the least power of two greater than or equal to the specified value.
-         *
-         * <p>Note that this function will return 1 when the argument is 0.
-         *
-         * @param x a long integer smaller than or equal to 2<sup>62</sup>.
-         * @return the least power of two greater than or equal to the specified value.
-         */
-        public static long nextPowerOfTwo(long x) {
-            if (x == 0) {
-                return 1;
-            }
-            x--;
-            x |= x >> 1;
-            x |= x >> 2;
-            x |= x >> 4;
-            x |= x >> 8;
-            x |= x >> 16;
-            return (x | x >> 32) + 1;
-        }
-
-
-        /** Returns the maximum number of entries that can be filled before rehashing.
-         *
-         * @param n the size of the backing array.
-         * @param f the load factor.
-         * @return the maximum number of entries before rehashing.
-         */
-        public static int maxFill(final int n, final float f) {
-		/* We must guarantee that there is always at least
-		 * one free entry (even with pathological load factors). */
-            return Math.min((int) Math.ceil(n * f), n - 1);
-        }
-
-        /**
-         * Returns the least power of two smaller than or equal to 2<sup>30</sup> and larger than or equal to
-         * <code>Math.ceil( expected / f )</code>.
-         *
-         * @param expected the expected number of elements in a hash table.
-         * @param f the load factor.
-         * @return the minimum possible size for a backing array.
-         * @throws IllegalArgumentException if the necessary size is larger than 2<sup>30</sup>.
-         */
-        public static int arraySize(final int expected, final float f) {
-            final long result = Math.max(2, nextPowerOfTwo((long) Math.ceil(expected / f)));
-            if (result > (1 << 30)) {
-                throw new IllegalArgumentException("Too large (" + expected +
-                        " expected elements with load factor " + f + ")");
-            }
-            return (int) result;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/77897f60/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/OpenHashMapContextData.java
----------------------------------------------------------------------
diff --git a/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/OpenHashMapContextData.java b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/OpenHashMapContextData.java
new file mode 100644
index 0000000..26bdf53
--- /dev/null
+++ b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/OpenHashMapContextData.java
@@ -0,0 +1,886 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache license, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license for the specific language governing permissions and
+ * limitations under the license.
+ */
+package org.apache.logging.log4j.perf.nogc;
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.ConcurrentModificationException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import org.apache.logging.log4j.spi.ContextData;
+import org.apache.logging.log4j.spi.MutableContextData;
+import org.apache.logging.log4j.spi.ThreadContextMap;
+import org.apache.logging.log4j.util.BiConsumer;
+import org.apache.logging.log4j.util.TriConsumer;
+
+/**
+ * Open hash map-based implementation of the {@code ContextData} interface.
+ * Implementation based on <a href="http://fastutil.di.unimi.it/">fastutil</a>'s
+ * <a href="http://fastutil.di.unimi.it/docs/it/unimi/dsi/fastutil/objects/Object2ObjectOpenHashMap.html">Object2ObjectOpenHashMap</a>.
+ * <p>
+ * A type-specific hash map with a fast, small-footprint implementation.
+ *
+ * <P>
+ * Instances of this class use a hash table to represent a map. The table is
+ * filled up to a specified <em>load factor</em>, and then doubled in size to
+ * accommodate new entries. If the table is emptied below <em>one fourth</em> of
+ * the load factor, it is halved in size. However, halving is not performed when
+ * deleting entries from an iterator, as it would interfere with the iteration
+ * process.
+ *
+ * <p>
+ * Note that {@link #clear()} does not modify the hash table size. Rather, the
+ * {@link #trim(int)} method lets you control the size of
+ * the table; this is particularly useful if you reuse instances of this class.
+ * <p>Main features:</p>
+ * <ul>
+ *   <li>Garbage-free iteration over key-value pairs with {@code BiConsumer} and {@code TriConsumer}.</li>
+ *   <li>Fast copy. If the ThreadContextMap is also an instance of {@code OpenHashMapContextData},
+ *     the full thread context data can be transferred with two array copies and five field updates.</li>
+ * </ul>
+ *
+ * @since 2.7
+ */
+public class OpenHashMapContextData<K, V> implements MutableContextData, ThreadContextMap {
+    /** The initial default size of a hash table. */
+    public static final int DEFAULT_INITIAL_SIZE = 16;
+
+    /** The default load factor of a hash table. */
+    public static final float DEFAULT_LOAD_FACTOR = .75f;
+
+    private static final long serialVersionUID = -1486744623338827187L;
+
+    /** The array of keys. */
+    protected transient K[] keys;
+    /** The array of values. */
+    protected transient V[] values;
+    /** The mask for wrapping a position counter. */
+    protected transient int mask;
+    /** Whether this set contains the key zero. */
+    protected transient boolean containsNullKey;
+    /** The current table size. */
+    protected transient int arraySize;
+    /**
+     * Threshold after which we rehash. It must be the table size times {@link #loadFactor}.
+     */
+    protected transient int maxFill;
+    /** Number of entries in the set (including the key zero, if present). */
+    protected int size;
+    /** The acceptable load factor. */
+    protected final float loadFactor;
+
+    private V defRetValue = null;
+
    /**
     * Creates a new hash map with an initial capacity of {@link #DEFAULT_INITIAL_SIZE}
     * entries and {@link #DEFAULT_LOAD_FACTOR} as the load factor.
     */
    public OpenHashMapContextData() {
        // Delegate to the main constructor, which validates and sizes the table.
        this(DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR);
    }
    /**
     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as the load factor.
     *
     * @param expected
     *            the expected number of elements in the hash map.
     * @throws IllegalArgumentException if {@code expected} is negative.
     */
    public OpenHashMapContextData(final int expected) {
        this(expected, DEFAULT_LOAD_FACTOR);
    }
    /**
     * Creates a new hash map.
     *
     * <p>
     * The actual table size will be the least power of two greater than
     * <code>expected</code>/<code>f</code>.
     *
     * @param expected
     *            the expected number of elements in the hash set.
     * @param f
     *            the load factor.
     * @throws IllegalArgumentException if {@code f} is not in {@code (0, 1]} or
     *            {@code expected} is negative.
     */
    @SuppressWarnings("unchecked")
    public OpenHashMapContextData(final int expected, final float f) {
        if (f <= 0 || f > 1) {
            throw new IllegalArgumentException(
                    "Load factor must be greater than 0 and smaller than or equal to 1");
        }
        if (expected < 0){
            throw new IllegalArgumentException(
                    "The expected number of elements must be nonnegative");
        }
        this.loadFactor = f;
        arraySize = HashCommon.arraySize(expected, f);
        // Power-of-two table size lets us replace modulo with a bit mask.
        mask = arraySize - 1;
        maxFill = HashCommon.maxFill(arraySize, f);
        // The extra slot at index arraySize is reserved for the null key.
        keys = (K[]) new Object[arraySize + 1];
        values = (V[]) new Object[arraySize + 1];
    }
    /**
     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load
     * factor copying a given one.
     *
     * @param map
     *            a {@link Map} to be copied into the new hash map.
     */
    public OpenHashMapContextData(final Map<? extends K, ? extends V> map) {
        this(map, DEFAULT_LOAD_FACTOR);
    }
    /**
     * Creates a new hash map copying a given one.
     *
     * @param map
     *            a {@link Map} to be copied into the new hash map.
     * @param f
     *            the load factor.
     */
    public OpenHashMapContextData(final Map<? extends K, ? extends V> map, final float f) {
        // Size the table up front for map.size() elements, then copy.
        this(map.size(), f);
        putAll(map);
    }
+
    /**
     * Creates a new hash map with {@link #DEFAULT_LOAD_FACTOR} as load
     * factor copying a given type-specific one.
     *
     * @param contextData
     *            a type-specific map to be copied into the new hash map.
     */
    public OpenHashMapContextData(final ContextData contextData) {
        this(contextData, DEFAULT_LOAD_FACTOR);
    }
    /**
     * Creates a new hash map copying a given type-specific one.
     *
     * @param contextData
     *            a type-specific map to be copied into the new hash map.
     * @param f
     *            the load factor.
     */
    public OpenHashMapContextData(final ContextData contextData, final float f) {
        this(contextData.size(), f);
        if (contextData instanceof OpenHashMapContextData) {
            // Fast path: copy the backing arrays directly.
            initFrom0((OpenHashMapContextData) contextData);
        } else {
            // Generic path: iterate the source and insert entry by entry.
            contextData.forEach(PUT_ALL, this);
        }
    }
    /** Reusable callback that copies each visited entry into the target map; avoids per-call allocation. */
    private static final TriConsumer<String, Object, MutableContextData> PUT_ALL =
            new TriConsumer<String, Object, MutableContextData>() {
        @Override
        public void accept(final String key, final Object value, final MutableContextData contextData) {
            contextData.putValue(key, value);
        }
    };
+
    /**
     * Bulk-copies the state of another {@code OpenHashMapContextData} into this instance:
     * two array copies plus five field assignments (no per-entry hashing).
     * Note: the other map's {@code loadFactor} is NOT copied ({@link #loadFactor} is final),
     * so this map keeps its own resize policy.
     *
     * @param other the map to copy from.
     */
    @SuppressWarnings("unchecked")
    private void initFrom0(final OpenHashMapContextData other) {
        // this.loadFactor = other.loadFactor; // final field
        this.arraySize = other.arraySize;
        this.size = other.size;
        this.containsNullKey = other.containsNullKey;
        this.mask = other.mask;
        this.maxFill = other.maxFill;
        // +1: include the extra slot reserved for the null key.
        keys = (K[]) Arrays.copyOf(other.keys, arraySize + 1);
        values = (V[]) Arrays.copyOf(other.values, arraySize + 1);
    }
+
    /** Returns the number of entries stored in the regular table slots (excluding the null-key slot). */
    private int realSize() {
        return containsNullKey ? size - 1 : size;
    }

    /**
     * Grows the table, if necessary, so that {@code capacity} entries fit without rehashing.
     *
     * @param capacity the desired number of entries.
     */
    private void ensureCapacity(final int capacity) {
        final int needed = HashCommon.arraySize(capacity, loadFactor);
        if (needed > arraySize) {
            rehash(needed);
        }
    }
+
+    private void tryCapacity(final long capacity) {
+        final int needed = (int) Math.min(
+                1 << 30, Math.max(2, HashCommon.nextPowerOfTwo((int) Math.ceil(capacity / loadFactor))));
+        if (needed > arraySize) {
+            rehash(needed);
+        }
+    }
+
+    @Override
+    public Map<String, String> asMap() {
+        final Map<String, String> result = new HashMap<>(size);
+        forEach(COPY_INTO_MAP, result);
+        return result;
+    }
+
    /** Reusable callback that stringifies each visited value and puts the pair into the target map. */
    private static final TriConsumer<String, Object, Map<String, String>> COPY_INTO_MAP =
            new TriConsumer<String, Object, Map<String, String>>() {
        @Override
        public void accept(final String k, final Object v, final Map<String, String> map) {
            // Preserve null values rather than storing the string "null".
            map.put(k, v == null ? null : v.toString());
        }
    };
+
+    /*
+     * Removes all elements from this map.
+     *
+     * <P>To increase object reuse, this method does not change the table size.
+     * If you want to reduce the table size, you must use {@link #trim()}.
+     */
+    @Override
+    public void clear() {
+        if (size == 0) {
+            return;
+        }
+        size = 0;
+        containsNullKey = false;
+        Arrays.fill(keys, (null));
+        Arrays.fill(values, null);
+    }
+
    /** Returns {@code true} if this map contains a mapping for the specified key (including the null key). */
    @Override
    public boolean containsKey(final String key) {
        return containsObjectKey((Object) key);
    }
+
+    @SuppressWarnings("unchecked")
+    private boolean containsObjectKey(final Object k) {
+        if (k == null) {
+            return containsNullKey;
+        }
+        K curr;
+        final K[] key = this.keys;
+        int pos;
+        // The starting point.
+        if ((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null) {
+            return false;
+        }
+        if (k.equals(curr)) {
+            return true;
+        }
+        // There's always an unused entry.
+        while (true) {
+            if ((curr = key[pos = (pos + 1) & mask]) == null) {
+                return false;
+            }
+            if (k.equals(curr)) {
+                return true;
+            }
+        }
+    }
+
+    public boolean equals(final Object obj) {
+        if (obj == this) {
+            return true;
+        }
+        if (!(obj instanceof ContextData)) {
+            return false;
+        }
+        final ContextData other = (ContextData) obj;
+        if (other.size() != size()) {
+            return false;
+        }
+        int pos = arraySize;
+        if (containsNullKey) {
+            if (!Objects.equals(getObjectValue(null), other.getValue(null))) {
+                return false;
+            }
+        }
+        --pos;
+        final K myKeys[] = this.keys;
+        for (; pos >= 0; pos--) {
+            K k;
+            if ((k = myKeys[pos]) != null) {
+                if (!Objects.equals(values[pos], other.getValue((String) k))) {
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public <VAL> void forEach(final BiConsumer<String, ? super VAL> action) {
+        final int startSize = size;
+        final K myKeys[] = this.keys;
+        int pos = arraySize;
+        if (containsNullKey) {
+            action.accept((String) myKeys[pos], (VAL) values[pos]);
+            if (size != startSize) {
+                throw new ConcurrentModificationException();
+            }
+        }
+        --pos;
+        for (; pos >= 0; pos--) {
+            if (myKeys[pos] != null) {
+                action.accept((String) myKeys[pos], (VAL) values[pos]);
+                if (size != startSize) {
+                    throw new ConcurrentModificationException();
+                }
+            }
+        }
+    }
+
    /**
     * Invokes {@code action} once for every key-value pair, passing through the caller-supplied
     * {@code state} object so the callback can be a shared constant (garbage-free iteration).
     * Visits the null-key slot (index {@code arraySize}) first, then regular slots in reverse.
     * Throws {@code ConcurrentModificationException} if the callback changes the map size.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <VAL, STATE> void forEach(final TriConsumer<String, ? super VAL, STATE> action, final STATE state) {
        final int startSize = size;
        final K myKeys[] = this.keys;
        int pos = arraySize;
        if (containsNullKey) {
            action.accept((String) myKeys[pos], (VAL) values[pos], state);
            if (size != startSize) {
                throw new ConcurrentModificationException();
            }
        }
        --pos;
        for (; pos >= 0; pos--) {
            if (myKeys[pos] != null) {
                action.accept((String) myKeys[pos], (VAL) values[pos], state);
                if (size != startSize) {
                    throw new ConcurrentModificationException();
                }
            }
        }
    }
+
    /**
     * Returns the value for the given key cast to String, or null if absent.
     * Note: will throw ClassCastException if a non-String value was stored via putValue.
     */
    @Override
    public String get(final String key) {
        return (String) getObjectValue(key);
    }
+
    /**
     * Linear-probe lookup returning the value mapped to {@code k}, or {@code defRetValue}
     * (null) if the key is absent. The null key is stored in the extra slot at index
     * {@code arraySize}.
     *
     * @param k the key to look up (may be null).
     * @return the mapped value, or {@code defRetValue} if absent.
     */
    @SuppressWarnings("unchecked")
    private V getObjectValue(final Object k) {
        if (k == null) {
            return containsNullKey ? values[arraySize] : defRetValue;
        }
        K curr;
        final K[] key = this.keys;
        int pos;
        // The starting point: the key's home slot.
        if ((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null) {
            return defRetValue;
        }
        if (k.equals(curr)) {
            return values[pos];
        }
        // There's always an unused entry, so the probe loop terminates.
        while (true) {
            if (((curr = key[pos = (pos + 1) & mask]) == null)) {
                return defRetValue;
            }
            if (k.equals(curr)) {
                return values[pos];
            }
        }
    }
+
    /** Returns a mutable String-to-String copy of this map (see {@link #asMap()}). */
    @Override
    public Map<String, String> getCopy() {
        return asMap();
    }

    /** Returns an unmodifiable snapshot of this map, or null when the map is empty. */
    @Override
    public Map<String, String> getImmutableMapOrNull() {
        return isEmpty() ? null : Collections.unmodifiableMap(asMap());
    }
+
+    @Override
+    public <VAL> VAL getValue(final String key) {
+        return (VAL) getObjectValue((Object) key);
+    }
+
    /** Returns {@code true} when this map contains no entries. */
    @Override
    public boolean isEmpty() {
        return size == 0;
    }

    /** Stores a String-to-String mapping (ThreadContextMap contract); delegates to the generic insert path. */
    @Override
    @SuppressWarnings("unchecked")
    public void put(final String key, final String value) {
        putObjectValue((K) key, (V) value);
    }
+
    /**
     * Core insertion routine. If {@code k} is already present, returns its slot index and
     * leaves the stored value untouched (the caller overwrites it). If {@code k} is new,
     * stores the pair, grows the table when the fill threshold is reached, and returns -1.
     *
     * @param k the key (null is stored in the extra slot at index {@code arraySize}).
     * @param v the value.
     * @return the slot index of an existing mapping for {@code k}, or -1 if a new entry was created.
     */
    private int insert(final K k, final V v) {
        int pos;
        if (k == null) {
            if (containsNullKey) {
                return arraySize; // existing null-key entry: caller replaces its value
            }
            containsNullKey = true;
            pos = arraySize;
        } else {
            K curr;
            final K[] key = this.keys;
            // The starting point: the key's home slot, then linear probing.
            if (!((curr = key[pos = HashCommon.mix(k.hashCode()) & mask]) == null)) {
                if (curr.equals(k)) {
                    return pos;
                }
                while (!((curr = key[pos = (pos + 1) & mask]) == null)) {
                    if (curr.equals(k)) {
                        return pos;
                    }
                }
            }
        }
        keys[pos] = k;
        values[pos] = v;
        // Grow when the fill threshold is reached, keeping probes short.
        if (size++ >= maxFill) {
            rehash(HashCommon.arraySize(size + 1, loadFactor));
        }
        return -1;
    }
+
    /**
     * Copies all entries from {@code source} into this map. When this map is empty and the
     * source is also an {@code OpenHashMapContextData}, the backing arrays are copied directly
     * (fast path); otherwise entries are inserted one by one. A null source is a no-op.
     */
    @Override
    public void putAll(final ContextData source) {
        if (size() == 0 && source instanceof OpenHashMapContextData) {
            initFrom0((OpenHashMapContextData) source);
        } else if (source != null) {
            source.forEach(PUT_ALL, this);
        }
    }
+
+    /** {@inheritDoc} */
+    public void putAll(Map<? extends K, ? extends V> map) {
+        if (loadFactor <= .5) {
+            // The resulting map will be sized for m.size() elements
+            ensureCapacity(map.size());
+        } else {
+            // The resulting map will be tentatively sized for size() +  m.size() elements
+            tryCapacity(size() + map.size());
+        }
+        for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
+            putObjectValue(entry.getKey(), entry.getValue());
+        }
+    }
+
    /**
     * Inserts or replaces a mapping and returns the previous value (or {@code defRetValue}
     * if the key was absent). {@link #insert} stores new entries itself and returns -1;
     * for existing keys it returns the slot so this method can swap the value.
     */
    private V putObjectValue(final K k, final V v) {
        final int pos = insert(k, v);
        if (pos < 0) {
            return defRetValue; // new entry was created by insert()
        }
        final V oldValue = values[pos];
        values[pos] = v;
        return oldValue;
    }

    /** Stores a String-to-Object mapping (MutableContextData contract). */
    @Override
    @SuppressWarnings("unchecked")
    public void putValue(final String key, final Object value) {
        putObjectValue((K) key, (V) value);
    }

    /** Removes the mapping for the given key, if present (ThreadContextMap contract). */
    @Override
    public void remove(final String key) {
        removeObjectKey((Object) key);
    }
+
+    @SuppressWarnings("unchecked")
+    private V removeObjectKey(final Object k) {
+        if (k == null) {
+            if (containsNullKey) {
+                return removeNullEntry();
+            }
+            return defRetValue;
+        }
+        final K[] key = this.keys;
+        int pos = HashCommon.mix(k.hashCode()) & mask;
+        K curr = key[pos & mask];
+        // The starting point.
+        if (curr == null) {
+            return defRetValue;
+        }
+        if (k.equals(curr)) {
+            return removeEntry(pos);
+        }
+        while (true) {
+            if ((curr = key[pos = (pos + 1) & mask]) == null) {
+                return defRetValue;
+            }
+            if (k.equals(curr)) {
+                return removeEntry(pos);
+            }
+        }
+    }
    /**
     * Removes the entry at {@code pos}: clears the value, closes the probe chain via
     * {@link #shiftKeys(int)}, and shrinks the table when it drops below a quarter full.
     */
    private V removeEntry(final int pos) {
        final V oldValue = values[pos];
        values[pos] = null;
        size--;
        shiftKeys(pos); // keep linear-probe chains intact after the removal
        if (size < maxFill / 4 && arraySize > DEFAULT_INITIAL_SIZE) {
            rehash(arraySize / 2);
        }
        return oldValue;
    }
    /**
     * Removes the null-key entry from its dedicated slot at index {@code arraySize}.
     * No key shifting is needed because the null key never participates in probe chains.
     */
    private V removeNullEntry() {
        containsNullKey = false;
        keys[arraySize] = null;
        final V oldValue = values[arraySize];
        values[arraySize] = null;
        size--;
        if (size < maxFill / 4 && arraySize > DEFAULT_INITIAL_SIZE) {
            rehash(arraySize / 2);
        }
        return oldValue;
    }
    /**
     * Shifts left entries with the specified hash code, starting at the
     * specified position, and empties the resulting free entry.
     *
     * <p>After a removal, subsequent entries whose probe chain passed through the freed
     * slot must be moved back, or later lookups would stop early at the hole. An entry at
     * {@code pos} may be moved into {@code last} only if its home slot {@code slot} does
     * not lie strictly between {@code last} (exclusive) and {@code pos} (inclusive),
     * accounting for wrap-around — that is the condition tested below.
     *
     * @param pos
     *            a starting position.
     */
    private void shiftKeys(int pos) {
        // Shift entries with the same hash.
        int last, slot;
        K curr;
        final K[] myKeys = this.keys;
        for (;;) {
            pos = ((last = pos) + 1) & mask;
            for (;;) {
                if (((curr = myKeys[pos]) == null)) {
                    // End of the probe chain: the hole at `last` can safely stay empty.
                    myKeys[last] = (null);
                    values[last] = null;
                    return;
                }
                slot = HashCommon.mix(curr.hashCode()) & mask;
                // Move curr back into `last` only if its home slot is outside (last, pos].
                if (last <= pos ? (last >= slot || slot > pos) : (last >= slot && slot > pos)) {
                    break;
                }
                pos = (pos + 1) & mask;
            }
            myKeys[last] = curr;
            values[last] = values[pos];
        }
    }
+
    /** Returns the number of entries, including the null-key entry if present. */
    @Override
    public int size() {
        return size;
    }
+
+    /**
+     * Rehashes this map if the table is too large.
+     *
+     * <P>
+     * Let <var>N</var> be the smallest table size that can hold
+     * <code>max(n,{@link #size()})</code> entries, still satisfying the load
+     * factor. If the current table size is smaller than or equal to
+     * <var>N</var>, this method does nothing. Otherwise, it rehashes this map
+     * in a table of size <var>N</var>.
+     *
+     * <P>
+     * This method is useful when reusing maps. {@linkplain #clear() Clearing a
+     * map} leaves the table size untouched. If you are reusing a map many times,
+     * you can call this method with a typical size to avoid keeping around a
+     * very large table just because of a few large transient maps.
+     *
+     * @param n
+     *            the threshold for the trimming.
+     * @return true if there was enough memory to trim the map.
+     */
+    public boolean trim(final int n) {
+        final int l = HashCommon.nextPowerOfTwo((int) Math.ceil(n / loadFactor));
+        if (l >= n || size > HashCommon.maxFill(l, loadFactor))
+            return true;
+        try {
+            rehash(l);
+        } catch (OutOfMemoryError cantDoIt) { // unusual to catch OOME but in this case appropriate
+            return false;
+        }
+        return true;
+    }
    /**
     * Rehashes the map.
     *
     * <P>
     * This method implements the basic rehashing strategy, and may be overriden
     * by subclasses implementing different rehashing strategies (e.g.,
     * disk-based rehashing). However, you should not override this method
     * unless you understand the internal workings of this class.
     *
     * @param newN
     *            the new size
     */
    @SuppressWarnings("unchecked")
    protected void rehash(final int newN) {
        final K myKeys[] = this.keys;
        final V myValues[] = this.values;
        final int mask = newN - 1; // Note that this is used by the hashing
        // macro
        final K newKey[] = (K[]) new Object[newN + 1];
        final V newValue[] = (V[]) new Object[newN + 1];
        // Walk the old table backwards, re-inserting each of realSize() regular entries.
        int i = arraySize, pos;
        for (int j = realSize(); j-- != 0;) {
            while (myKeys[--i] == null) {
                // advance i until we find an existing key
            }
            if (newKey[pos = HashCommon.mix(myKeys[i].hashCode()) & mask] != null) { // rehash & check slot availability
                while (newKey[pos = (pos + 1) & mask] != null) {
                    // find available slot at (or immediately following) pos
                }
            }
            newKey[pos] = myKeys[i];
            newValue[pos] = myValues[i];
        }
        // Carry over the null-key value (its key slot is null by definition).
        newValue[newN] = myValues[arraySize];
        arraySize = newN;
        this.mask = mask;
        maxFill = HashCommon.maxFill(arraySize, loadFactor);
        this.keys = newKey;
        this.values = newValue;
    }
+
+    /**
+     * Returns a hash code for this map.
+     *
+     * @return a hash code for this map.
+     */
+    public int hashCode() {
+        int result = 0;
+        for (int j = realSize(), i = 0, t = 0; j-- != 0;) {
+            while (keys[i] == null) {
+                i++;
+            }
+            if (this != keys[i]) {
+                t = keys[i].hashCode();
+            }
+            if (this != values[i]) {
+                t ^= (values[i] == null ? 0 : values[i].hashCode());
+            }
+            result += t;
+            i++;
+        }
+        // Zero / null keys have hash zero.
+        if (containsNullKey) {
+            result += (values[arraySize] == null ? 0 : values[arraySize].hashCode());
+        }
+        return result;
+    }
+
    /**
     * Custom deserialization: the transient arrays are rebuilt from the serialized
     * (key, value) pairs written by {@link #writeObject}, re-probing each key into a
     * freshly sized table.
     */
    @SuppressWarnings("unchecked")
    private void readObject(final ObjectInputStream s) throws IOException, ClassNotFoundException {
        s.defaultReadObject();
        // Recompute the transient sizing fields from the deserialized `size`.
        arraySize = HashCommon.arraySize(size, loadFactor);
        maxFill = HashCommon.maxFill(arraySize, loadFactor);
        mask = arraySize - 1;
        final K key[] = this.keys = (K[]) new Object[arraySize + 1];
        final V value[] = this.values = (V[]) new Object[arraySize + 1];
        K k;
        V v;
        for (int i = size, pos; i-- != 0;) {
            k = (K) s.readObject();
            v = (V) s.readObject();
            if (k == null) {
                pos = arraySize; // null key goes into the dedicated extra slot
                containsNullKey = true;
            } else {
                pos = HashCommon.mix(k.hashCode()) & mask;
                while (key[pos] != null) {
                    pos = (pos + 1) & mask; // linear probing
                }
            }
            key[pos] = k;
            value[pos] = v;
        }
    }
+
    /**
     * Custom serialization: writes the non-transient fields, then each (key, value) pair
     * via {@link #forEach}. IOExceptions thrown inside the callback are tunnelled out
     * wrapped in a RuntimeException and unwrapped here.
     */
    private void writeObject(final ObjectOutputStream s) throws IOException {
        s.defaultWriteObject();
        try {
            forEach(SERIALIZER, s);
        } catch (final RuntimeException runex) {
            // Unwrap an IOException tunnelled out of the SERIALIZER callback.
            if (runex.getCause() instanceof IOException) {
                throw (IOException) runex.getCause();
            }
            throw runex;
        }
    }

    /** Callback that writes one (key, value) pair; wraps IOException because TriConsumer cannot throw it. */
    private static final TriConsumer<String, Object, ObjectOutputStream> SERIALIZER =
            new TriConsumer<String, Object, ObjectOutputStream>() {
                @Override
                public void accept(final String k, final Object v, final ObjectOutputStream objectOutputStream) {
                    try {
                        objectOutputStream.writeObject(k);
                        objectOutputStream.writeObject(v);
                    } catch (final IOException ioex) {
                        throw new IllegalStateException(ioex);
                    }
                }
            };
+
+    @Override
+    public String toString() {
+        final StringBuilder sb = new StringBuilder(256);
+        sb.append('{');
+        final K myKeys[] = this.keys;
+        int pos = arraySize;
+        boolean first = true;
+        if (containsNullKey) {
+            sb.append(myKeys[pos] == this ? "(this map)" : myKeys[pos]);
+            sb.append('=');
+            sb.append(values[pos] == this ? "(this map)" : values[pos]);
+            first = false;
+        }
+        --pos;
+        for (; pos >= 0; pos--) {
+            if (myKeys[pos] != null) {
+                if (first) {
+                    first = false;
+                } else {
+                    sb.append(", ");
+                }
+                sb.append(myKeys[pos] == this ? "(this map)" : myKeys[pos]);
+                sb.append('=');
+                sb.append(values[pos] == this ? "(this map)" : values[pos]);
+            }
+        }
+        sb.append('}');
+        return sb.toString();
+    }
+
    /** Static helper methods for hash-table sizing and bit mixing, extracted from fastutil's HashCommon. */
    private static class HashCommon {
        private HashCommon() {} // static utility holder; not instantiable

        /** 2<sup>32</sup> &middot; &phi;, &phi; = (&#x221A;5 &minus; 1)/2. */
        private static final int INT_PHI = 0x9E3779B9;

        /** The reciprocal of {@link #INT_PHI} modulo 2<sup>32</sup>. */
        private static final int INV_INT_PHI = 0x144cbc89;

        /** Avalanches the bits of an integer by applying the finalisation step of MurmurHash3.
         *
         * <p>This method implements the finalisation step of Austin Appleby's
         * <a href="http://code.google.com/p/smhasher/">MurmurHash3</a>.
         * Its purpose is to avalanche the bits of the argument to within 0.25% bias.
         *
         * @param x an integer.
         * @return a hash value with good avalanching properties.
         */
        public static int murmurHash3(int x) {
            x ^= x >>> 16;
            x *= 0x85ebca6b;
            x ^= x >>> 13;
            x *= 0xc2b2ae35;
            x ^= x >>> 16;
            return x;
        }

        /**
         * Quickly mixes the bits of an integer.
         *
         * <p>This method mixes the bits of the argument by multiplying by the golden ratio and
         * xorshifting the result. It is borrowed from <a href="https://github.com/OpenHFT/Koloboke">Koloboke</a>, and
         * it has slightly worse behaviour than {@link #murmurHash3(int)} (in open-addressing hash tables the average
         * number of probes is slightly larger), but it's much faster.
         *
         * @param x an integer.
         * @return a hash value obtained by mixing the bits of {@code x}.
         * @see #invMix(int)
         */
        public static int mix(final int x) {
            final int h = x * INT_PHI;
            return h ^ (h >>> 16);
        }

        /** The inverse of {@link #mix(int)}. This method is mainly useful to create unit tests.
         *
         * @param x an integer.
         * @return a value that passed through {@link #mix(int)} would give {@code x}.
         */
        public static int invMix(final int x) {
            return (x ^ x >>> 16) * INV_INT_PHI;
        }

        /** Return the least power of two greater than or equal to the specified value.
         *
         * <p>Note that this function will return 1 when the argument is 0.
         *
         * @param x an integer smaller than or equal to 2<sup>30</sup>.
         * @return the least power of two greater than or equal to the specified value.
         */
        public static int nextPowerOfTwo(int x) {
            if (x == 0) {
                return 1;
            }
            // Smear the highest set bit of (x - 1) into all lower positions, then add 1.
            x--;
            x |= x >> 1;
            x |= x >> 2;
            x |= x >> 4;
            x |= x >> 8;
            return (x | x >> 16) + 1;
        }

        /** Return the least power of two greater than or equal to the specified value.
         *
         * <p>Note that this function will return 1 when the argument is 0.
         *
         * @param x a long integer smaller than or equal to 2<sup>62</sup>.
         * @return the least power of two greater than or equal to the specified value.
         */
        public static long nextPowerOfTwo(long x) {
            if (x == 0) {
                return 1;
            }
            x--;
            x |= x >> 1;
            x |= x >> 2;
            x |= x >> 4;
            x |= x >> 8;
            x |= x >> 16;
            return (x | x >> 32) + 1;
        }


        /** Returns the maximum number of entries that can be filled before rehashing.
         *
         * @param n the size of the backing array.
         * @param f the load factor.
         * @return the maximum number of entries before rehashing.
         */
        public static int maxFill(final int n, final float f) {
            // We must guarantee that there is always at least one free entry
            // (even with pathological load factors), hence the n - 1 bound.
            return Math.min((int) Math.ceil(n * f), n - 1);
        }

        /**
         * Returns the least power of two smaller than or equal to 2<sup>30</sup> and larger than or equal to
         * <code>Math.ceil( expected / f )</code>.
         *
         * @param expected the expected number of elements in a hash table.
         * @param f the load factor.
         * @return the minimum possible size for a backing array.
         * @throws IllegalArgumentException if the necessary size is larger than 2<sup>30</sup>.
         */
        public static int arraySize(final int expected, final float f) {
            final long result = Math.max(2, nextPowerOfTwo((long) Math.ceil(expected / f)));
            if (result > (1 << 30)) {
                throw new IllegalArgumentException("Too large (" + expected +
                        " expected elements with load factor " + f + ")");
            }
            return (int) result;
        }
    }
+}


[06/10] logging-log4j2 git commit: LOG4J2-1349 removed: log4j-perf does not have a test root folder

Posted by rp...@apache.org.
LOG4J2-1349 removed: log4j-perf does not have a test root folder


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/10bd83e2
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/10bd83e2
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/10bd83e2

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: 10bd83e2e135672595efc054e2565fa33249ef4c
Parents: 77897f6
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 16:58:22 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 16:58:22 2016 +0900

----------------------------------------------------------------------
 .../log4j/spi/OpenHashMapContextDataTest.java   | 507 -------------------
 1 file changed, 507 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/10bd83e2/log4j-api/src/test/java/org/apache/logging/log4j/spi/OpenHashMapContextDataTest.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/test/java/org/apache/logging/log4j/spi/OpenHashMapContextDataTest.java b/log4j-api/src/test/java/org/apache/logging/log4j/spi/OpenHashMapContextDataTest.java
deleted file mode 100644
index a49ca81..0000000
--- a/log4j-api/src/test/java/org/apache/logging/log4j/spi/OpenHashMapContextDataTest.java
+++ /dev/null
@@ -1,507 +0,0 @@
-package org.apache.logging.log4j.spi;/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache license, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the license for the specific language governing permissions and
- * limitations under the license.
- */
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.logging.log4j.spi.ContextData;
-import org.apache.logging.log4j.spi.OpenHashMapContextData;
-import org.apache.logging.log4j.util.BiConsumer;
-import org.apache.logging.log4j.util.TriConsumer;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
-
-/**
- * Tests the OpenHashMapContextData class.
- */
-public class OpenHashMapContextDataTest {
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testConstructorDisallowsNegativeCapacity() throws Exception {
-        new OpenHashMapContextData(-1);
-    }
-
-    @Test
-    public void testConstructorAllowsZeroCapacity() throws Exception {
-        OpenHashMapContextData data = new OpenHashMapContextData(0);
-        assertEquals(2, data.arraySize);
-    }
-
-    @Test(expected = NullPointerException.class)
-    @SuppressWarnings("unchecked")
-    public void testConstructorDisallowsNullMap() throws Exception {
-        assertEquals(0, new OpenHashMapContextData((Map) null).size());
-    }
-
-    @Test(expected = NullPointerException.class)
-    @SuppressWarnings("unchecked")
-    public void testConstructorDisallowsNullContextData() throws Exception {
-        assertEquals(0, new OpenHashMapContextData((ContextData) null).size());
-    }
-
-    @Test
-    public void testToString() {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-        assertEquals("{B=Bvalue, a=avalue, 3=3value}", original.toString());
-    }
-
-    @Test
-    public void testSerialization() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-
-        final byte[] binary = serialize(original);
-        final OpenHashMapContextData copy = deserialize(binary);
-        assertEquals(original, copy);
-    }
-
-    private byte[] serialize(final OpenHashMapContextData data) throws IOException {
-        final ByteArrayOutputStream arr = new ByteArrayOutputStream();
-        final ObjectOutputStream out = new ObjectOutputStream(arr);
-        out.writeObject(data);
-        return arr.toByteArray();
-    }
-
-    private OpenHashMapContextData deserialize(final byte[] binary) throws IOException, ClassNotFoundException {
-        final ByteArrayInputStream inArr = new ByteArrayInputStream(binary);
-        final ObjectInputStream in = new ObjectInputStream(inArr);
-        final OpenHashMapContextData result = (OpenHashMapContextData) in.readObject();
-        return result;
-    }
-
-    @Test
-    public void testPutAll() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-
-        final OpenHashMapContextData other = new OpenHashMapContextData();
-        other.putAll(original);
-        assertEquals(original, other);
-
-        other.putValue("3", "otherValue");
-        assertNotEquals(original, other);
-
-        other.putValue("3", null);
-        assertNotEquals(original, other);
-
-        other.putValue("3", "3value");
-        assertEquals(original, other);
-    }
-
-    @Test
-    public void testEquals() {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-        assertEquals(original, original); // equal to itself
-
-        final OpenHashMapContextData other = new OpenHashMapContextData();
-        other.putValue("a", "avalue");
-        assertNotEquals(original, other);
-
-        other.putValue("B", "Bvalue");
-        assertNotEquals(original, other);
-
-        other.putValue("3", "3value");
-        assertEquals(original, other);
-
-        other.putValue("3", "otherValue");
-        assertNotEquals(original, other);
-
-        other.putValue("3", null);
-        assertNotEquals(original, other);
-
-        other.putValue("3", "3value");
-        assertEquals(original, other);
-    }
-
-    @Test
-    public void testAsMap() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-
-        final Map<String, Object> expected = new HashMap<>();
-        expected.put("a", "avalue");
-        expected.put("B", "Bvalue");
-        expected.put("3", "3value");
-
-        assertEquals(expected, original.asMap());
-    }
-
-    @Test
-    public void testGetCopyDelegatesToAsMap() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        assertEquals(original.getCopy(), original.asMap());
-
-        original.putValue("B", "Bvalue");
-        assertEquals(original.getCopy(), original.asMap());
-
-        original.putValue("3", "3value");
-        assertEquals(original.getCopy(), original.asMap());
-    }
-
-    @Test
-    @SuppressWarnings("unchecked")
-    public void testGetImmutableMapOrNull() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        assertEquals(original.getImmutableMapOrNull(), original.asMap());
-
-        original.putValue("B", "Bvalue");
-        assertEquals(original.getImmutableMapOrNull(), original.asMap());
-
-        original.putValue("3", "3value");
-        assertEquals(original.getImmutableMapOrNull(), original.asMap());
-
-        try {
-            original.getImmutableMapOrNull().put("abc", "xyz");
-            fail("Expected map to be immutable");
-        } catch (final UnsupportedOperationException ok) {
-            //ok
-        }
-    }
-
-    @Test
-    public void testPutInsertsInAlphabeticOrder() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.put("a", "avalue");
-        original.put("B", "Bvalue");
-        original.put("3", "3value");
-        original.put("c", "cvalue");
-        original.put("d", "dvalue");
-
-        assertEquals("avalue", original.getValue("a"));
-        assertEquals("Bvalue", original.getValue("B"));
-        assertEquals("3value", original.getValue("3"));
-        assertEquals("cvalue", original.getValue("c"));
-        assertEquals("dvalue", original.getValue("d"));
-    }
-
-    @Test
-    public void testPutValueInsertsInAlphabeticOrder() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-        original.putValue("c", "cvalue");
-        original.putValue("d", "dvalue");
-
-        assertEquals("avalue", original.getValue("a"));
-        assertEquals("Bvalue", original.getValue("B"));
-        assertEquals("3value", original.getValue("3"));
-        assertEquals("cvalue", original.getValue("c"));
-        assertEquals("dvalue", original.getValue("d"));
-    }
-
-    @Test
-    public void testNullKeysAllowed() {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-        original.putValue("c", "cvalue");
-        original.putValue("d", "dvalue");
-        assertEquals(5, original.size());
-        assertEquals("{B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
-
-        original.putValue(null, "nullvalue");
-        assertEquals(6, original.size());
-        assertEquals("{null=nullvalue, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
-
-        original.putValue(null, "otherNullvalue");
-        assertEquals("{null=otherNullvalue, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
-        assertEquals(6, original.size());
-
-        original.putValue(null, "nullvalue");
-        assertEquals(6, original.size());
-        assertEquals("{null=nullvalue, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
-
-        original.putValue(null, "abc");
-        assertEquals(6, original.size());
-        assertEquals("{null=abc, B=Bvalue, a=avalue, 3=3value, d=dvalue, c=cvalue}", original.toString());
-    }
-
-    @Test
-    public void testNullKeysCopiedToAsMap() {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-        original.putValue("c", "cvalue");
-        original.putValue("d", "dvalue");
-        assertEquals(5, original.size());
-
-        HashMap<String, String> expected = new HashMap<>();
-        expected.put("a", "avalue");
-        expected.put("B", "Bvalue");
-        expected.put("3", "3value");
-        expected.put("c", "cvalue");
-        expected.put("d", "dvalue");
-        assertEquals("initial", expected, original.asMap());
-
-        original.putValue(null, "nullvalue");
-        expected.put(null, "nullvalue");
-        assertEquals(6, original.size());
-        assertEquals("with null key", expected, original.asMap());
-
-        original.putValue(null, "otherNullvalue");
-        expected.put(null, "otherNullvalue");
-        assertEquals(6, original.size());
-        assertEquals("with null key value2", expected, original.asMap());
-
-        original.putValue(null, "nullvalue");
-        expected.put(null, "nullvalue");
-        assertEquals(6, original.size());
-        assertEquals("with null key value1 again", expected, original.asMap());
-
-        original.putValue(null, "abc");
-        expected.put(null, "abc");
-        assertEquals(6, original.size());
-        assertEquals("with null key value3", expected, original.asMap());
-    }
-
-    @Test
-    public void testRemove() {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        assertEquals(1, original.size());
-        assertEquals("avalue", original.getValue("a"));
-
-        original.remove("a");
-        assertEquals(0, original.size());
-        assertNull("no a val", original.getValue("a"));
-
-        original.remove("B");
-        assertEquals(0, original.size());
-        assertNull("no B val", original.getValue("B"));
-    }
-
-    @Test
-    public void testNullValuesArePreserved() {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        assertEquals(1, original.size());
-        assertEquals("avalue", original.getValue("a"));
-
-        original.putValue("a", null);
-        assertEquals(1, original.size());
-        assertNull("no a val", original.getValue("a"));
-
-        original.putValue("B", null);
-        assertEquals(2, original.size());
-        assertNull("no B val", original.getValue("B"));
-    }
-
-    @Test
-    public void testGet() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.put("a", "avalue");
-        original.put("B", "Bvalue");
-        original.put("3", "3value");
-
-        assertEquals("avalue", original.get("a"));
-        assertEquals("Bvalue", original.get("B"));
-        assertEquals("3value", original.get("3"));
-
-        original.putValue("0", "0value");
-        assertEquals("0value", original.get("0"));
-        assertEquals("3value", original.get("3"));
-        assertEquals("Bvalue", original.get("B"));
-        assertEquals("avalue", original.get("a"));
-    }
-
-    @Test
-    public void testGetValue_GetValueAt() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-
-        assertEquals("avalue", original.getValue("a"));
-        assertEquals("Bvalue", original.getValue("B"));
-        assertEquals("3value", original.getValue("3"));
-        original.putValue("0", "0value");
-        assertEquals("0value", original.getValue("0"));
-        assertEquals("3value", original.getValue("3"));
-        assertEquals("Bvalue", original.getValue("B"));
-        assertEquals("avalue", original.getValue("a"));
-    }
-
-    @Test
-    public void testClear() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        original.putValue("a", "avalue");
-        original.putValue("B", "Bvalue");
-        original.putValue("3", "3value");
-        assertEquals(3, original.size());
-
-        original.clear();
-        assertEquals(0, original.size());
-
-        // ensure slots in the values array are nulled out
-        Field f = OpenHashMapContextData.class.getDeclaredField("values");
-        f.setAccessible(true);
-        Object[] values = (Object[]) f.get(original);
-        for (int i = 0; i < values.length; i++) {
-            assertNull(values[i]);
-        }
-    }
-
-    @Test
-    public void testContainsKey() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        assertFalse("a", original.containsKey("a"));
-        assertFalse("B", original.containsKey("B"));
-        assertFalse("3", original.containsKey("3"));
-        assertFalse("A", original.containsKey("A"));
-
-        original.putValue("a", "avalue");
-        assertTrue("a", original.containsKey("a"));
-        assertFalse("B", original.containsKey("B"));
-        assertFalse("3", original.containsKey("3"));
-        assertFalse("A", original.containsKey("A"));
-
-        original.putValue("B", "Bvalue");
-        assertTrue("a", original.containsKey("a"));
-        assertTrue("B", original.containsKey("B"));
-        assertFalse("3", original.containsKey("3"));
-        assertFalse("A", original.containsKey("A"));
-
-        original.putValue("3", "3value");
-        assertTrue("a", original.containsKey("a"));
-        assertTrue("B", original.containsKey("B"));
-        assertTrue("3", original.containsKey("3"));
-        assertFalse("A", original.containsKey("A"));
-
-        original.putValue("A", "AAA");
-        assertTrue("a", original.containsKey("a"));
-        assertTrue("B", original.containsKey("B"));
-        assertTrue("3", original.containsKey("3"));
-        assertTrue("A", original.containsKey("A"));
-    }
-
-    @Test
-    public void testSizeAndIsEmpty() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        assertEquals(0, original.size());
-        assertTrue("initial", original.isEmpty());
-
-        original.putValue("a", "avalue");
-        assertEquals(1, original.size());
-        assertFalse("size=" + original.size(), original.isEmpty());
-
-        original.putValue("B", "Bvalue");
-        assertEquals(2, original.size());
-        assertFalse("size=" + original.size(), original.isEmpty());
-
-        original.putValue("3", "3value");
-        assertEquals(3, original.size());
-        assertFalse("size=" + original.size(), original.isEmpty());
-
-        original.remove("B");
-        assertEquals(2, original.size());
-        assertFalse("size=" + original.size(), original.isEmpty());
-
-        original.remove("3");
-        assertEquals(1, original.size());
-        assertFalse("size=" + original.size(), original.isEmpty());
-
-        original.remove("a");
-        assertEquals(0, original.size());
-        assertTrue("size=" + original.size(), original.isEmpty());
-    }
-
-    @Test
-    public void testForEachBiConsumer() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        final List<String> keys = new ArrayList<>(Arrays.asList("a", "B", "3", null));
-        final List<String> values = new ArrayList<>(Arrays.asList("aValue", "Bvalue", "3Value", "nullValue"));
-        for (int i = 0; i < keys.size(); i++) {
-            original.put(keys.get(i), values.get(i));
-        }
-
-        original.forEach(new BiConsumer<String, String>() {
-            int count = 0;
-            @Override
-            public void accept(final String key, final String value) {
-                assertTrue("key exists", keys.remove(key));
-                assertTrue("val exists", values.remove(value));
-                assertEquals("val", value, original.getValue(key));
-                count++;
-                assertTrue("count should not exceed size but was " + count, count <= original.size());
-            }
-        });
-    }
-
-    static class State<K, V> {
-        OpenHashMapContextData data;
-        int count;
-        List<K> keys;
-        List<V> values;
-    }
-    private static TriConsumer<String, String, State> COUNTER = new TriConsumer<String, String, State>() {
-        @Override
-        public void accept(final String key, final String value, final State state) {
-            assertTrue("key exists", state.keys.remove(key));
-            assertTrue("val exists", state.values.remove(value));
-            assertEquals("val", value, state.data.getValue(key));
-            state.count++;
-            assertTrue("count should not exceed size but was " + state.count,
-                    state.count <= state.data.size());
-        }
-    };
-
-    @Test
-    public void testForEachTriConsumer() throws Exception {
-        final OpenHashMapContextData original = new OpenHashMapContextData();
-        final List<String> keys = Arrays.asList("a", "B", "3", null);
-        final List<String> values = Arrays.asList("aValue", "Bvalue", "3Value", "nullValue");
-        for (int i = 0; i < keys.size(); i++) {
-            original.put(keys.get(i), values.get(i));
-        }
-        final State state = new State();
-        state.data = original;
-        state.keys = new ArrayList(keys);
-        state.values = new ArrayList(values);
-
-        original.forEach(COUNTER, state);
-        assertEquals(state.count, original.size());
-        assertTrue("all keys", state.keys.isEmpty());
-        assertTrue("all values", state.values.isEmpty());
-    }
-}
\ No newline at end of file


[03/10] logging-log4j2 git commit: LOG4J2-1349 implement CopyOnWrite, MutableContextDataSupplier interfaces

Posted by rp...@apache.org.
LOG4J2-1349 implement CopyOnWrite, MutableContextDataSupplier interfaces


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/eafc25bf
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/eafc25bf
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/eafc25bf

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: eafc25bfe4a6d5f83dcc099efff8d5443ecc1f78
Parents: cdbf383
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 16:56:22 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 16:56:22 2016 +0900

----------------------------------------------------------------------
 .../log4j/spi/AbstractCopyOnWriteMutableThreadContext.java  | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/eafc25bf/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractCopyOnWriteMutableThreadContext.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractCopyOnWriteMutableThreadContext.java b/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractCopyOnWriteMutableThreadContext.java
index ffc09ac..c71ed95 100644
--- a/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractCopyOnWriteMutableThreadContext.java
+++ b/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractCopyOnWriteMutableThreadContext.java
@@ -30,7 +30,8 @@ import org.apache.logging.log4j.util.PropertiesUtil;
  *
  * @since 2.7
  */
-public abstract class AbstractCopyOnWriteMutableThreadContext implements ThreadContextMap, ThreadContextMap2 {
+public abstract class AbstractCopyOnWriteMutableThreadContext implements ThreadContextMap, ThreadContextMap2,
+        CopyOnWrite, MutableContextDataSupplier {
 
     /**
      * The default initial capacity.
@@ -129,7 +130,11 @@ public abstract class AbstractCopyOnWriteMutableThreadContext implements ThreadC
         return map == null ? Collections.<String, String>emptyMap() : map.asMap();
     }
 
-    public MutableContextData getContextData() {
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public MutableContextData getMutableContextData() {
         return localMap.get();
     }
 


[02/10] logging-log4j2 git commit: LOG4J2-1349 added MutableContextDataSupplier interface for custom ContextInjectors

Posted by rp...@apache.org.
LOG4J2-1349 added MutableContextDataSupplier interface for custom ContextInjectors


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/cdbf3835
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/cdbf3835
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/cdbf3835

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: cdbf38353509f56f680d089a5fdbd880984ed205
Parents: 736c8a5
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 16:55:24 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 16:55:24 2016 +0900

----------------------------------------------------------------------
 .../log4j/spi/MutableContextDataSupplier.java        | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/cdbf3835/log4j-api/src/main/java/org/apache/logging/log4j/spi/MutableContextDataSupplier.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/main/java/org/apache/logging/log4j/spi/MutableContextDataSupplier.java b/log4j-api/src/main/java/org/apache/logging/log4j/spi/MutableContextDataSupplier.java
new file mode 100644
index 0000000..3224468
--- /dev/null
+++ b/log4j-api/src/main/java/org/apache/logging/log4j/spi/MutableContextDataSupplier.java
@@ -0,0 +1,15 @@
+package org.apache.logging.log4j.spi;
+
+/**
+ * Interface for objects that know how to provide a {@code MutableContextData} object.
+ *
+ * @since 2.7
+ */
+public interface MutableContextDataSupplier {
+
+    /**
+     * Returns the {@code MutableContextData}.
+     * @return the {@code MutableContextData}
+     */
+    MutableContextData getMutableContextData();
+}


[04/10] logging-log4j2 git commit: LOG4J2-1349 implement MutableContextDataSupplier interface

Posted by rp...@apache.org.
LOG4J2-1349 implement MutableContextDataSupplier interface


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/78e8afde
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/78e8afde
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/78e8afde

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: 78e8afde00a5a9abdc961ba9f965c1393dd4c189
Parents: eafc25b
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 16:56:56 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 16:56:56 2016 +0900

----------------------------------------------------------------------
 .../log4j/spi/AbstractGarbageFreeMutableThreadContext.java  | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/78e8afde/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractGarbageFreeMutableThreadContext.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractGarbageFreeMutableThreadContext.java b/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractGarbageFreeMutableThreadContext.java
index 95dd30a..4e86761 100644
--- a/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractGarbageFreeMutableThreadContext.java
+++ b/log4j-api/src/main/java/org/apache/logging/log4j/spi/AbstractGarbageFreeMutableThreadContext.java
@@ -30,7 +30,8 @@ import org.apache.logging.log4j.util.PropertiesUtil;
  * </p>
  * @since 2.7
  */
-public abstract class AbstractGarbageFreeMutableThreadContext implements ThreadContextMap, ThreadContextMap2 {
+public abstract class AbstractGarbageFreeMutableThreadContext implements ThreadContextMap, ThreadContextMap2,
+        MutableContextDataSupplier {
 
     /**
      * The default initial capacity.
@@ -131,7 +132,11 @@ public abstract class AbstractGarbageFreeMutableThreadContext implements ThreadC
         return map == null ? Collections.<String, String>emptyMap() : map.asMap();
     }
 
-    public MutableContextData getContextData() {
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public MutableContextData getMutableContextData() {
         return localMap.get();
     }
 


[09/10] logging-log4j2 git commit: LOG4J2-1349 updated imports for moved classes

Posted by rp...@apache.org.
LOG4J2-1349 updated imports for moved classes


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/0ae2ab8c
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/0ae2ab8c
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/0ae2ab8c

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: 0ae2ab8c7f6e8aa7c7a3929bc89e0e87ca618a11
Parents: 1f75dc1
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 17:01:58 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 17:01:58 2016 +0900

----------------------------------------------------------------------
 .../log4j/perf/jmh/ArrayContextDataVsHashMapBenchmark.java     | 2 +-
 .../apache/logging/log4j/perf/jmh/ThreadContextBenchmark.java  | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/0ae2ab8c/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ArrayContextDataVsHashMapBenchmark.java
----------------------------------------------------------------------
diff --git a/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ArrayContextDataVsHashMapBenchmark.java b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ArrayContextDataVsHashMapBenchmark.java
index eb354a5..fae27de 100644
--- a/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ArrayContextDataVsHashMapBenchmark.java
+++ b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ArrayContextDataVsHashMapBenchmark.java
@@ -23,7 +23,7 @@ import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.logging.log4j.spi.ArrayContextData;
-import org.apache.logging.log4j.spi.OpenHashMapContextData;
+import org.apache.logging.log4j.perf.nogc.OpenHashMapContextData;
 import org.apache.logging.log4j.util.BiConsumer;
 import org.apache.logging.log4j.util.TriConsumer;
 import org.openjdk.jmh.annotations.Benchmark;

http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/0ae2ab8c/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ThreadContextBenchmark.java
----------------------------------------------------------------------
diff --git a/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ThreadContextBenchmark.java b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ThreadContextBenchmark.java
index f2626c9..cfa2eb7 100644
--- a/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ThreadContextBenchmark.java
+++ b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/jmh/ThreadContextBenchmark.java
@@ -31,13 +31,13 @@ import org.apache.logging.log4j.core.config.Property;
 import org.apache.logging.log4j.core.impl.ContextDataInjector;
 import org.apache.logging.log4j.core.impl.ContextDataInjectorFactory;
 import org.apache.logging.log4j.spi.ArrayContextData;
-import org.apache.logging.log4j.spi.CopyOnWriteOpenHashMapThreadContextMap;
+import org.apache.logging.log4j.perf.nogc.CopyOnWriteOpenHashMapThreadContextMap;
 import org.apache.logging.log4j.spi.CopyOnWriteSortedArrayThreadContextMap;
 import org.apache.logging.log4j.spi.DefaultThreadContextMap;
-import org.apache.logging.log4j.spi.GarbageFreeOpenHashMapThreadContextMap;
+import org.apache.logging.log4j.perf.nogc.GarbageFreeOpenHashMapThreadContextMap;
 import org.apache.logging.log4j.spi.GarbageFreeSortedArrayThreadContextMap;
 import org.apache.logging.log4j.spi.MutableContextData;
-import org.apache.logging.log4j.spi.OpenHashMapContextData;
+import org.apache.logging.log4j.perf.nogc.OpenHashMapContextData;
 import org.apache.logging.log4j.spi.ThreadContextMap;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;


[08/10] logging-log4j2 git commit: LOG4J2-1349 remove link to removed class OpenHashMapContextData

Posted by rp...@apache.org.
LOG4J2-1349 remove link to removed class OpenHashMapContextData


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/1f75dc15
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/1f75dc15
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/1f75dc15

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: 1f75dc155a1086efd9dd1c19cab37f26f2b1f5f6
Parents: fee7f2e
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 17:00:35 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 17:00:35 2016 +0900

----------------------------------------------------------------------
 .../org/apache/logging/log4j/core/impl/ContextDataFactory.java     | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/1f75dc15/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataFactory.java
----------------------------------------------------------------------
diff --git a/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataFactory.java b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataFactory.java
index 340298a..6661863 100644
--- a/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataFactory.java
+++ b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataFactory.java
@@ -18,7 +18,6 @@ package org.apache.logging.log4j.core.impl;
 
 import org.apache.logging.log4j.spi.ArrayContextData;
 import org.apache.logging.log4j.spi.MutableContextData;
-import org.apache.logging.log4j.spi.OpenHashMapContextData;
 import org.apache.logging.log4j.util.PropertiesUtil;
 
 /**
@@ -30,7 +29,6 @@ import org.apache.logging.log4j.util.PropertiesUtil;
  * </p>
  *
  * @see ArrayContextData
- * @see OpenHashMapContextData
  * @since 2.7
  */
 public class ContextDataFactory {


[10/10] logging-log4j2 git commit: LOG4J2-1349 use the new CopyOnWrite, MutableContextDataSupplier interfaces to determine which strategy to use for injecting context data from the ThreadContextMap

Posted by rp...@apache.org.
LOG4J2-1349 use the new CopyOnWrite, MutableContextDataSupplier interfaces to determine which strategy to use for injecting context data from the ThreadContextMap


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/5edcfe82
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/5edcfe82
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/5edcfe82

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: 5edcfe8234ef78b00d297615c592fbcc2a7c77c0
Parents: 0ae2ab8
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 17:04:26 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 17:04:26 2016 +0900

----------------------------------------------------------------------
 .../log4j/core/impl/ContextDataInjectorFactory.java      |  8 ++++----
 .../log4j/core/impl/ThreadContextDataInjector.java       | 11 +++++------
 2 files changed, 9 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/5edcfe82/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataInjectorFactory.java
----------------------------------------------------------------------
diff --git a/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataInjectorFactory.java b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataInjectorFactory.java
index fa1c28f..947fbb0 100644
--- a/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataInjectorFactory.java
+++ b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ContextDataInjectorFactory.java
@@ -16,12 +16,12 @@
  */
 package org.apache.logging.log4j.core.impl;
 
-import org.apache.logging.log4j.ThreadContext;
 import org.apache.logging.log4j.ThreadContextAccess;
 import org.apache.logging.log4j.core.LogEvent;
-import org.apache.logging.log4j.spi.AbstractCopyOnWriteMutableThreadContext;
 import org.apache.logging.log4j.spi.AbstractGarbageFreeMutableThreadContext;
 import org.apache.logging.log4j.spi.ContextData;
+import org.apache.logging.log4j.spi.CopyOnWrite;
+import org.apache.logging.log4j.spi.MutableContextDataSupplier;
 import org.apache.logging.log4j.spi.ThreadContextMap;
 import org.apache.logging.log4j.status.StatusLogger;
 import org.apache.logging.log4j.util.LoaderUtil;
@@ -73,10 +73,10 @@ public class ContextDataInjectorFactory {
 
     private static ContextDataInjector createDefaultInjector() {
         final ThreadContextMap threadContextMap = ThreadContextAccess.getThreadContextMap();
-        if (threadContextMap instanceof AbstractCopyOnWriteMutableThreadContext) {
+        if (threadContextMap instanceof CopyOnWrite && threadContextMap instanceof MutableContextDataSupplier) {
             return new ThreadContextDataInjector.ForCopyOnWriteMutableThreadContextMap();
         }
-        if (threadContextMap instanceof AbstractGarbageFreeMutableThreadContext) {
+        if (threadContextMap instanceof MutableContextDataSupplier) {
             return new ThreadContextDataInjector.ForGarbageFreeMutableThreadContextMap();
         }
         return new ThreadContextDataInjector.ForDefaultThreadContextMap();

http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/5edcfe82/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ThreadContextDataInjector.java
----------------------------------------------------------------------
diff --git a/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ThreadContextDataInjector.java b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ThreadContextDataInjector.java
index 6c56520..eeaaf4c 100644
--- a/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ThreadContextDataInjector.java
+++ b/log4j-core/src/main/java/org/apache/logging/log4j/core/impl/ThreadContextDataInjector.java
@@ -22,10 +22,9 @@ import java.util.Map;
 import org.apache.logging.log4j.ThreadContext;
 import org.apache.logging.log4j.ThreadContextAccess;
 import org.apache.logging.log4j.core.config.Property;
-import org.apache.logging.log4j.spi.AbstractCopyOnWriteMutableThreadContext;
-import org.apache.logging.log4j.spi.AbstractGarbageFreeMutableThreadContext;
 import org.apache.logging.log4j.spi.ContextData;
 import org.apache.logging.log4j.spi.MutableContextData;
+import org.apache.logging.log4j.spi.MutableContextDataSupplier;
 
 /**
  * {@code ThreadContextDataInjector} contains a number of strategies for copying key-value pairs from the various
@@ -107,8 +106,8 @@ public class ThreadContextDataInjector  {
             // modified.
             copyProperties(props, reusable);
 
-            final ContextData immutableCopy = ((AbstractGarbageFreeMutableThreadContext)
-                    ThreadContextAccess.getThreadContextMap()).getContextData();
+            final ContextData immutableCopy = ((MutableContextDataSupplier) ThreadContextAccess.getThreadContextMap())
+                    .getMutableContextData();
             reusable.putAll(immutableCopy);
             return reusable;
         }
@@ -136,8 +135,8 @@ public class ThreadContextDataInjector  {
         public MutableContextData injectContextData(final List<Property> props, final MutableContextData reusable) {
             // If there are no configuration properties we want to just return the ThreadContext's MutableContextData:
             // it is a copy-on-write data structure so we are sure ThreadContext changes will not affect our copy.
-            final MutableContextData immutableCopy = ((AbstractCopyOnWriteMutableThreadContext)
-                    ThreadContextAccess.getThreadContextMap()).getContextData();
+            final MutableContextData immutableCopy =
+                    ((MutableContextDataSupplier) ThreadContextAccess.getThreadContextMap()).getMutableContextData();
             if (props == null || props.isEmpty()) {
                 return immutableCopy;
             }


[07/10] logging-log4j2 git commit: LOG4J2-1349 moved to log4j-perf module

Posted by rp...@apache.org.
LOG4J2-1349 moved to log4j-perf module


Project: http://git-wip-us.apache.org/repos/asf/logging-log4j2/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4j2/commit/fee7f2e2
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4j2/tree/fee7f2e2
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4j2/diff/fee7f2e2

Branch: refs/heads/LOG4J2-1349-gcfree-threadcontext
Commit: fee7f2e21f359f477196a980b7df861b09a35a95
Parents: 10bd83e
Author: rpopma <rp...@apache.org>
Authored: Sat Aug 27 16:59:23 2016 +0900
Committer: rpopma <rp...@apache.org>
Committed: Sat Aug 27 16:59:23 2016 +0900

----------------------------------------------------------------------
 .../CopyOnWriteOpenHashMapThreadContextMap.java | 39 -----------------
 .../GarbageFreeOpenHashMapThreadContextMap.java | 42 ------------------
 .../CopyOnWriteOpenHashMapThreadContextMap.java | 42 ++++++++++++++++++
 .../GarbageFreeOpenHashMapThreadContextMap.java | 45 ++++++++++++++++++++
 4 files changed, 87 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/fee7f2e2/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWriteOpenHashMapThreadContextMap.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWriteOpenHashMapThreadContextMap.java b/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWriteOpenHashMapThreadContextMap.java
deleted file mode 100644
index a1b82bb..0000000
--- a/log4j-api/src/main/java/org/apache/logging/log4j/spi/CopyOnWriteOpenHashMapThreadContextMap.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache license, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the license for the specific language governing permissions and
- * limitations under the license.
- */
-package org.apache.logging.log4j.spi;
-
-import org.apache.logging.log4j.util.PropertiesUtil;
-
-/**
- * {@code OpenHashMapContextData}-based implementation of the {@code ThreadContextMap} interface that creates a copy of
- * the data structure on every modification. Any particular instance of the data structure is a snapshot of the
- * ThreadContext at some point in time and can safely be passed off to other threads
- *
- * @since 2.7
- */
-public class CopyOnWriteOpenHashMapThreadContextMap extends AbstractCopyOnWriteMutableThreadContext {
-    @Override
-    protected MutableContextData createMutableContextData() {
-        return new OpenHashMapContextData<>(PropertiesUtil.getProperties().getIntegerProperty(
-                PROPERTY_NAME_INITIAL_CAPACITY, DEFAULT_INITIAL_CAPACITY));
-    }
-
-    @Override
-    protected MutableContextData createMutableContextData(final ContextData original) {
-        return new OpenHashMapContextData<>(original);
-    }
-}

http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/fee7f2e2/log4j-api/src/main/java/org/apache/logging/log4j/spi/GarbageFreeOpenHashMapThreadContextMap.java
----------------------------------------------------------------------
diff --git a/log4j-api/src/main/java/org/apache/logging/log4j/spi/GarbageFreeOpenHashMapThreadContextMap.java b/log4j-api/src/main/java/org/apache/logging/log4j/spi/GarbageFreeOpenHashMapThreadContextMap.java
deleted file mode 100644
index 09ebb15..0000000
--- a/log4j-api/src/main/java/org/apache/logging/log4j/spi/GarbageFreeOpenHashMapThreadContextMap.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache license, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the license for the specific language governing permissions and
- * limitations under the license.
- */
-package org.apache.logging.log4j.spi;
-
-import org.apache.logging.log4j.util.PropertiesUtil;
-
-/**
- * {@code OpenHashMapContextData}-based implementation of the {@code ThreadContextMap} interface that attempts not to
- * create temporary objects. Adding and removing key-value pairs will not create temporary objects.
- * <p>
- * Since the underlying data structure is modified directly it is not suitable for passing by reference to other
- * threads. Instead, client code needs to copy the contents when interacting with another thread.
- * </p>
- *
- * @since 2.7
- */
-public class GarbageFreeOpenHashMapThreadContextMap extends AbstractGarbageFreeMutableThreadContext {
-    @Override
-    protected MutableContextData createMutableContextData() {
-        return new OpenHashMapContextData<>(PropertiesUtil.getProperties().getIntegerProperty(
-                PROPERTY_NAME_INITIAL_CAPACITY, DEFAULT_INITIAL_CAPACITY));
-    }
-
-    @Override
-    protected MutableContextData createMutableContextData(final ContextData original) {
-        return new OpenHashMapContextData<>(original);
-    }
-}

http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/fee7f2e2/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/CopyOnWriteOpenHashMapThreadContextMap.java
----------------------------------------------------------------------
diff --git a/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/CopyOnWriteOpenHashMapThreadContextMap.java b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/CopyOnWriteOpenHashMapThreadContextMap.java
new file mode 100644
index 0000000..f219c90
--- /dev/null
+++ b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/CopyOnWriteOpenHashMapThreadContextMap.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache license, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license for the specific language governing permissions and
+ * limitations under the license.
+ */
+package org.apache.logging.log4j.perf.nogc;
+
+import org.apache.logging.log4j.spi.AbstractCopyOnWriteMutableThreadContext;
+import org.apache.logging.log4j.spi.ContextData;
+import org.apache.logging.log4j.spi.MutableContextData;
+import org.apache.logging.log4j.util.PropertiesUtil;
+
+/**
+ * {@code OpenHashMapContextData}-based implementation of the {@code ThreadContextMap} interface that creates a copy of
+ * the data structure on every modification. Any particular instance of the data structure is a snapshot of the
+ * ThreadContext at some point in time and can safely be passed off to other threads
+ *
+ * @since 2.7
+ */
+public class CopyOnWriteOpenHashMapThreadContextMap extends AbstractCopyOnWriteMutableThreadContext {
+    @Override
+    protected MutableContextData createMutableContextData() {
+        return new OpenHashMapContextData<>(PropertiesUtil.getProperties().getIntegerProperty(
+                PROPERTY_NAME_INITIAL_CAPACITY, DEFAULT_INITIAL_CAPACITY));
+    }
+
+    @Override
+    protected MutableContextData createMutableContextData(final ContextData original) {
+        return new OpenHashMapContextData<>(original);
+    }
+}

http://git-wip-us.apache.org/repos/asf/logging-log4j2/blob/fee7f2e2/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/GarbageFreeOpenHashMapThreadContextMap.java
----------------------------------------------------------------------
diff --git a/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/GarbageFreeOpenHashMapThreadContextMap.java b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/GarbageFreeOpenHashMapThreadContextMap.java
new file mode 100644
index 0000000..400a76c
--- /dev/null
+++ b/log4j-perf/src/main/java/org/apache/logging/log4j/perf/nogc/GarbageFreeOpenHashMapThreadContextMap.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache license, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license for the specific language governing permissions and
+ * limitations under the license.
+ */
+package org.apache.logging.log4j.perf.nogc;
+
+import org.apache.logging.log4j.spi.AbstractGarbageFreeMutableThreadContext;
+import org.apache.logging.log4j.spi.ContextData;
+import org.apache.logging.log4j.spi.MutableContextData;
+import org.apache.logging.log4j.util.PropertiesUtil;
+
+/**
+ * {@code OpenHashMapContextData}-based implementation of the {@code ThreadContextMap} interface that attempts not to
+ * create temporary objects. Adding and removing key-value pairs will not create temporary objects.
+ * <p>
+ * Since the underlying data structure is modified directly it is not suitable for passing by reference to other
+ * threads. Instead, client code needs to copy the contents when interacting with another thread.
+ * </p>
+ *
+ * @since 2.7
+ */
+public class GarbageFreeOpenHashMapThreadContextMap extends AbstractGarbageFreeMutableThreadContext {
+    @Override
+    protected MutableContextData createMutableContextData() {
+        return new OpenHashMapContextData<>(PropertiesUtil.getProperties().getIntegerProperty(
+                PROPERTY_NAME_INITIAL_CAPACITY, DEFAULT_INITIAL_CAPACITY));
+    }
+
+    @Override
+    protected MutableContextData createMutableContextData(final ContextData original) {
+        return new OpenHashMapContextData<>(original);
+    }
+}