Posted to commits@commons.apache.org by tv...@apache.org on 2019/05/28 13:48:49 UTC

[commons-jcs] 01/09: Remove most Iterators

This is an automated email from the ASF dual-hosted git repository.

tv pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-jcs.git

commit d879971e521505d68cfa9e89b60d5a1eacbd16fc
Author: Thomas Vandahl <tv...@apache.org>
AuthorDate: Tue May 28 15:33:34 2019 +0200

    Remove most Iterators
---
 .../jcs/auxiliary/disk/block/BlockDiskCache.java   |  33 +-
 .../auxiliary/disk/indexed/IndexedDiskCache.java   |  18 +-
 .../disk/jdbc/dsfactory/JndiDataSourceFactory.java | 364 ++++++++++-----------
 .../auxiliary/remote/server/RemoteCacheServer.java |  21 +-
 .../apache/commons/jcs/engine/CacheListeners.java  |  29 +-
 .../commons/jcs/engine/control/CompositeCache.java |  35 +-
 .../jcs/engine/memory/AbstractMemoryCache.java     |  32 +-
 .../jcs/engine/memory/lru/LHMLRUMemoryCache.java   |   1 -
 .../commons/jcs/utils/struct/AbstractLRUMap.java   |  49 +--
 9 files changed, 248 insertions(+), 334 deletions(-)
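The common thread of the commit is replacing explicit java.util.Iterator loops with Java 8 idioms: Collection.removeIf for filter-while-iterating, Map.forEach for read-only traversal, and streams where a limit or transformation is involved. A minimal standalone sketch of the core before/after pattern (not code from this commit; the map contents are illustrative):

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    public class RemoveIfSketch
    {
        public static void main(String[] args)
        {
            Map<String, Integer> map = new HashMap<>();
            map.put("keep", 1);
            map.put("drop", -1);

            // Pre-Java-8 style: an explicit Iterator so entries can be removed mid-loop
            for (Iterator<Map.Entry<String, Integer>> it = map.entrySet().iterator(); it.hasNext(); )
            {
                if (it.next().getValue() < 0)
                {
                    it.remove();
                }
            }

            // Java 8 style used throughout this commit: removeIf on the entry set
            map.entrySet().removeIf(e -> e.getValue() < 0);

            System.out.println(map); // {keep=1}
        }
    }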

diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java
index fa4bfe4..2eae22b 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCache.java
@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -203,24 +202,28 @@ public class BlockDiskCache<K, V>
 
         try
         {
-            int maxToTest = 100;
-            int count = 0;
-            Iterator<Map.Entry<K, int[]>> it = this.keyStore.entrySet().iterator();
-            while ( it.hasNext() && count < maxToTest )
-            {
-                count++;
-                Map.Entry<K, int[]> entry = it.next();
-                Object data = this.dataFile.read( entry.getValue() );
-                if ( data == null )
-                {
-                    throw new Exception( logCacheName + "Couldn't find data for key [" + entry.getKey() + "]" );
-                }
-            }
+            this.keyStore.entrySet().stream()
+                .limit(100)
+                .forEach(entry -> {
+                    try
+                    {
+                        Object data = this.dataFile.read(entry.getValue());
+                        if ( data == null )
+                        {
+                            throw new IOException("Data is null");
+                        }
+                    }
+                    catch (IOException | ClassNotFoundException e)
+                    {
+                        throw new RuntimeException(logCacheName
+                                + " Couldn't find data for key [" + entry.getKey() + "]", e);
+                    }
+                });
             alright = true;
         }
         catch ( Exception e )
         {
-            log.warn( logCacheName + "Problem verifying disk.  Message [" + e.getMessage() + "]" );
+            log.warn(logCacheName + " Problem verifying disk.", e);
             alright = false;
         }
         finally
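The BlockDiskCache change turns the bounded verification loop into a stream with limit(100). Because a lambda body cannot throw checked exceptions, the IOException is wrapped in a RuntimeException inside the lambda and handled by the outer catch. A standalone sketch of that wrap-and-handle shape, assuming a hypothetical readBlock() in place of dataFile.read():

    import java.io.IOException;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class StreamCheckedExceptionSketch
    {
        private final Map<String, int[]> keyStore = new ConcurrentHashMap<>();

        // Hypothetical stand-in for this.dataFile.read(...) in BlockDiskCache
        private Object readBlock(int[] blocks) throws IOException
        {
            return blocks.length > 0 ? new Object() : null; // placeholder read
        }

        public boolean verify()
        {
            try
            {
                keyStore.entrySet().stream()
                    .limit(100)                   // spot-check at most 100 entries, as in the commit
                    .forEach(entry -> {
                        try
                        {
                            if (readBlock(entry.getValue()) == null)
                            {
                                throw new IOException("Data is null");
                            }
                        }
                        catch (IOException e)
                        {
                            // lambdas cannot throw checked exceptions, so wrap...
                            throw new RuntimeException("Couldn't find data for key ["
                                    + entry.getKey() + "]", e);
                        }
                    });
                return true;
            }
            catch (RuntimeException e)
            {
                // ...and handle/log at the call site
                return false;
            }
        }

        public static void main(String[] args)
        {
            StreamCheckedExceptionSketch sketch = new StreamCheckedExceptionSketch();
            sketch.keyStore.put("block-0", new int[] { 1 });
            sketch.keyStore.put("block-1", new int[0]); // empty -> readBlock() returns null
            System.out.println("verify: " + sketch.verify()); // verify: false
        }
    }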
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
index 30a82d3..42384f9 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
@@ -23,11 +23,10 @@ import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -1364,24 +1363,15 @@ public class IndexedDiskCache<K, V> extends AbstractDiskCache<K, V>
      * Creates a snapshot of the IndexedDiskElementDescriptors in the keyHash and returns them
      * sorted by position in the dataFile.
      * <p>
-     * TODO fix values() method on the LRU map.
-     * <p>
      *
      * @return IndexedDiskElementDescriptor[]
      */
     private IndexedDiskElementDescriptor[] createPositionSortedDescriptorList()
     {
-        IndexedDiskElementDescriptor[] defragList = new IndexedDiskElementDescriptor[keyHash.size()];
-        Iterator<Map.Entry<K, IndexedDiskElementDescriptor>> iterator = keyHash.entrySet().iterator();
-        for (int i = 0; iterator.hasNext(); i++)
-        {
-            Map.Entry<K, IndexedDiskElementDescriptor> next = iterator.next();
-            defragList[i] = next.getValue();
-        }
-
-        Arrays.sort(defragList, new PositionComparator());
+        List<IndexedDiskElementDescriptor> defragList = new ArrayList<>(keyHash.values());
+        Collections.sort(defragList, new PositionComparator());
 
-        return defragList;
+        return defragList.toArray(new IndexedDiskElementDescriptor[0]);
     }
 
     /**
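In IndexedDiskCache the hand-rolled array fill plus Arrays.sort becomes a copy of keyHash.values() into an ArrayList, a Collections.sort, and toArray(new IndexedDiskElementDescriptor[0]). A standalone sketch of the same snapshot-and-sort idiom, with Descriptor and its pos field as made-up stand-ins for IndexedDiskElementDescriptor and its file position:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class SortSnapshotSketch
    {
        static class Descriptor
        {
            final long pos;
            Descriptor(long pos) { this.pos = pos; }
        }

        public static Descriptor[] positionSorted(Map<String, Descriptor> keyHash)
        {
            // Copy the values into a list in one step instead of iterating by hand
            List<Descriptor> snapshot = new ArrayList<>(keyHash.values());
            Collections.sort(snapshot, Comparator.comparingLong((Descriptor d) -> d.pos));
            // toArray(new Descriptor[0]) sizes the result array for us
            return snapshot.toArray(new Descriptor[0]);
        }

        public static void main(String[] args)
        {
            Map<String, Descriptor> keyHash = new HashMap<>();
            keyHash.put("b", new Descriptor(20));
            keyHash.put("a", new Descriptor(10));
            System.out.println(positionSorted(keyHash)[0].pos); // 10
        }
    }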
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/jdbc/dsfactory/JndiDataSourceFactory.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/jdbc/dsfactory/JndiDataSourceFactory.java
index 517db18..cd32e11 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/jdbc/dsfactory/JndiDataSourceFactory.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/jdbc/dsfactory/JndiDataSourceFactory.java
@@ -1,185 +1,179 @@
-package org.apache.commons.jcs.auxiliary.disk.jdbc.dsfactory;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.sql.SQLException;
-import java.util.Hashtable;
-import java.util.Iterator;
-import java.util.Map;
-
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
-import javax.sql.DataSource;
-
-import org.apache.commons.jcs.auxiliary.disk.jdbc.JDBCDiskCacheAttributes;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * A factory that looks up the DataSource from JNDI.  It is also able
- * to deploy the DataSource based on properties found in the
- * configuration.
- *
- * This factory tries to avoid excessive context lookups to improve speed.
- * The time between two lookups can be configured. The default is 0 (no cache).
- *
- * Borrowed and adapted from Apache DB Torque
- *
- * @author <a href="mailto:jmcnally@apache.org">John McNally</a>
- * @author <a href="mailto:thomas@vandahl.org">Thomas Vandahl</a>
- */
-public class JndiDataSourceFactory implements DataSourceFactory
-{
-    /** The log. */
-    private static Log log = LogFactory.getLog(JndiDataSourceFactory.class);
-
-    /** The name of the factory. */
-    private String name;
-
-    /** The path to get the resource from. */
-    private String path;
-
-    /** The context to get the resource from. */
-    private Context ctx;
-
-    /** A locally cached copy of the DataSource */
-    private DataSource ds = null;
-
-    /** Time of last actual lookup action */
-    private long lastLookup = 0;
-
-    /** Time between two lookups */
-    private long ttl = 0; // ms
-
-    /**
-     * @return the name of the factory.
-     */
-    @Override
-	public String getName()
-    {
-    	return name;
-    }
-
-    /**
-     * @see org.apache.commons.jcs.auxiliary.disk.jdbc.dsfactory.DataSourceFactory#getDataSource()
-     */
-    @Override
-	public DataSource getDataSource() throws SQLException
-    {
-        long time = System.currentTimeMillis();
-
-        if (ds == null || time - lastLookup > ttl)
-        {
-            try
-            {
-                synchronized (ctx)
-                {
-                    ds = ((DataSource) ctx.lookup(path));
-                }
-                lastLookup = time;
-            }
-            catch (NamingException e)
-            {
-                throw new SQLException(e);
-            }
-        }
-
-        return ds;
-    }
-
-    /**
-     * @see org.apache.commons.jcs.auxiliary.disk.jdbc.dsfactory.DataSourceFactory#initialize(JDBCDiskCacheAttributes)
-     */
-    @Override
-	public void initialize(JDBCDiskCacheAttributes config) throws SQLException
-    {
-    	this.name = config.getConnectionPoolName();
-        initJNDI(config);
-    }
-
-    /**
-     * Initializes JNDI.
-     *
-     * @param config where to read the settings from
-     * @throws SQLException if a property set fails
-     */
-    private void initJNDI(JDBCDiskCacheAttributes config) throws SQLException
-    {
-        log.debug("Starting initJNDI");
-
-        try
-        {
-            this.path = config.getJndiPath();
-            if (log.isDebugEnabled())
-            {
-                log.debug("JNDI path: " + path);
-            }
-
-            this.ttl = config.getJndiTTL();
-            if (log.isDebugEnabled())
-            {
-                log.debug("Time between context lookups: " + ttl);
-            }
-
-    		Hashtable<String, Object> env = new Hashtable<String, Object>();
-            ctx = new InitialContext(env);
-
-            if (log.isDebugEnabled())
-            {
-            	log.debug("Created new InitialContext");
-            	debugCtx(ctx);
-            }
-        }
-        catch (NamingException e)
-        {
-            throw new SQLException(e);
-        }
-    }
-
-    /**
-     * Does nothing. We do not want to close a dataSource retrieved from Jndi,
-     * because other applications might use it as well.
-     */
-    @Override
-	public void close()
-    {
-        // do nothing
-    }
-
-    /**
-     *
-     * @param ctx the context
-     * @throws NamingException
-     */
-    private void debugCtx(Context ctx) throws NamingException
-    {
-        log.debug("InitialContext -------------------------------");
-        Map<?, ?> env = ctx.getEnvironment();
-        Iterator<?> qw = env.entrySet().iterator();
-        log.debug("Environment properties:" + env.size());
-        while (qw.hasNext())
-        {
-            Map.Entry<?, ?> entry = (Map.Entry<?, ?>) qw.next();
-            log.debug("    " + entry.getKey() + ": " + entry.getValue());
-        }
-        log.debug("----------------------------------------------");
-    }
-}
+package org.apache.commons.jcs.auxiliary.disk.jdbc.dsfactory;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.sql.SQLException;
+import java.util.Hashtable;
+import java.util.Map;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+import javax.sql.DataSource;
+
+import org.apache.commons.jcs.auxiliary.disk.jdbc.JDBCDiskCacheAttributes;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * A factory that looks up the DataSource from JNDI.  It is also able
+ * to deploy the DataSource based on properties found in the
+ * configuration.
+ *
+ * This factory tries to avoid excessive context lookups to improve speed.
+ * The time between two lookups can be configured. The default is 0 (no cache).
+ *
+ * Borrowed and adapted from Apache DB Torque
+ *
+ * @author <a href="mailto:jmcnally@apache.org">John McNally</a>
+ * @author <a href="mailto:thomas@vandahl.org">Thomas Vandahl</a>
+ */
+public class JndiDataSourceFactory implements DataSourceFactory
+{
+    /** The log. */
+    private static Log log = LogFactory.getLog(JndiDataSourceFactory.class);
+
+    /** The name of the factory. */
+    private String name;
+
+    /** The path to get the resource from. */
+    private String path;
+
+    /** The context to get the resource from. */
+    private Context ctx;
+
+    /** A locally cached copy of the DataSource */
+    private DataSource ds = null;
+
+    /** Time of last actual lookup action */
+    private long lastLookup = 0;
+
+    /** Time between two lookups */
+    private long ttl = 0; // ms
+
+    /**
+     * @return the name of the factory.
+     */
+    @Override
+	public String getName()
+    {
+    	return name;
+    }
+
+    /**
+     * @see org.apache.commons.jcs.auxiliary.disk.jdbc.dsfactory.DataSourceFactory#getDataSource()
+     */
+    @Override
+	public DataSource getDataSource() throws SQLException
+    {
+        long time = System.currentTimeMillis();
+
+        if (ds == null || time - lastLookup > ttl)
+        {
+            try
+            {
+                synchronized (ctx)
+                {
+                    ds = ((DataSource) ctx.lookup(path));
+                }
+                lastLookup = time;
+            }
+            catch (NamingException e)
+            {
+                throw new SQLException(e);
+            }
+        }
+
+        return ds;
+    }
+
+    /**
+     * @see org.apache.commons.jcs.auxiliary.disk.jdbc.dsfactory.DataSourceFactory#initialize(JDBCDiskCacheAttributes)
+     */
+    @Override
+	public void initialize(JDBCDiskCacheAttributes config) throws SQLException
+    {
+    	this.name = config.getConnectionPoolName();
+        initJNDI(config);
+    }
+
+    /**
+     * Initializes JNDI.
+     *
+     * @param config where to read the settings from
+     * @throws SQLException if a property set fails
+     */
+    private void initJNDI(JDBCDiskCacheAttributes config) throws SQLException
+    {
+        log.debug("Starting initJNDI");
+
+        try
+        {
+            this.path = config.getJndiPath();
+            if (log.isDebugEnabled())
+            {
+                log.debug("JNDI path: " + path);
+            }
+
+            this.ttl = config.getJndiTTL();
+            if (log.isDebugEnabled())
+            {
+                log.debug("Time between context lookups: " + ttl);
+            }
+
+    		Hashtable<String, Object> env = new Hashtable<String, Object>();
+            ctx = new InitialContext(env);
+
+            if (log.isDebugEnabled())
+            {
+            	log.debug("Created new InitialContext");
+            	debugCtx(ctx);
+            }
+        }
+        catch (NamingException e)
+        {
+            throw new SQLException(e);
+        }
+    }
+
+    /**
+     * Does nothing. We do not want to close a dataSource retrieved from Jndi,
+     * because other applications might use it as well.
+     */
+    @Override
+	public void close()
+    {
+        // do nothing
+    }
+
+    /**
+     *
+     * @param ctx the context
+     * @throws NamingException
+     */
+    private void debugCtx(Context ctx) throws NamingException
+    {
+        log.debug("InitialContext -------------------------------");
+        Map<?, ?> env = ctx.getEnvironment();
+        log.debug("Environment properties:" + env.size());
+        env.forEach((key, value) -> log.debug("    " + key + ": " + value));
+        log.debug("----------------------------------------------");
+    }
+}
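JndiDataSourceFactory shows as a full delete-and-re-add, but the only substantive change is in debugCtx(): the Iterator over the JNDI environment map is replaced by Map.forEach. A standalone sketch of that logging idiom, with System.out standing in for the commons-logging Log and illustrative environment values:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ForEachLoggingSketch
    {
        public static void main(String[] args)
        {
            Map<String, Object> env = new LinkedHashMap<>();
            env.put("java.naming.factory.initial", "com.example.DummyContextFactory"); // illustrative value
            env.put("java.naming.provider.url", "rmi://localhost:1099");               // illustrative value

            System.out.println("Environment properties:" + env.size());
            env.forEach((key, value) -> System.out.println("    " + key + ": " + value));
        }
    }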
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServer.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServer.java
index 58bda71..b1a241a 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServer.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/remote/server/RemoteCacheServer.java
@@ -27,7 +27,6 @@ import java.rmi.server.RMISocketFactory;
 import java.rmi.server.UnicastRemoteObject;
 import java.rmi.server.Unreferenced;
 import java.util.Collections;
-import java.util.Iterator;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
@@ -1234,21 +1233,11 @@ public class RemoteCacheServer<K, V>
     {
         synchronized ( eventQMap )
         {
-            for (Iterator<Map.Entry<Long, ICacheEventQueue<KK, VV>>> itr = eventQMap.entrySet().iterator(); itr.hasNext(); )
-            {
-                Map.Entry<Long, ICacheEventQueue<KK, VV>> e = itr.next();
-                ICacheEventQueue<KK, VV> q = e.getValue();
-
-                // this does not care if the q is alive (i.e. if
-                // there are active threads; it cares if the queue
-                // is working -- if it has not encountered errors
-                // above the failure threshold
-                if ( !q.isWorking() )
-                {
-                    itr.remove();
-                    log.warn( "Cache event queue " + q + " is not working and removed from cache server." );
-                }
-            }
+            // this does not care if the q is alive (i.e. if
+            // there are active threads; it cares if the queue
+            // is working -- if it has not encountered errors
+            // above the failure threshold
+            eventQMap.entrySet().removeIf(e -> !e.getValue().isWorking());
         }
     }
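RemoteCacheServer now prunes dead event queues with removeIf inside the synchronized block; the new one-liner drops the old per-queue warning. A standalone sketch of a removeIf predicate that still logs what it removes, with Queue/isWorking as made-up stand-ins for ICacheEventQueue:

    import java.util.HashMap;
    import java.util.Map;

    public class RemoveIfWithLoggingSketch
    {
        static class Queue
        {
            final boolean working;
            Queue(boolean working) { this.working = working; }
            boolean isWorking() { return working; }
        }

        public static void main(String[] args)
        {
            Map<Long, Queue> eventQMap = new HashMap<>();
            eventQMap.put(1L, new Queue(true));
            eventQMap.put(2L, new Queue(false));

            synchronized (eventQMap)
            {
                eventQMap.entrySet().removeIf(e -> {
                    if (!e.getValue().isWorking())
                    {
                        // keep the per-queue warning the old Iterator loop emitted
                        System.out.println("Cache event queue " + e.getKey()
                                + " is not working and removed from cache server.");
                        return true;
                    }
                    return false;
                });
            }

            System.out.println(eventQMap.keySet()); // [1]
        }
    }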
 
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/CacheListeners.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/CacheListeners.java
index 601529f..1c10a58 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/CacheListeners.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/CacheListeners.java
@@ -1,26 +1,5 @@
 package org.apache.commons.jcs.engine;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.Iterator;
-import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
@@ -68,11 +47,9 @@ public class CacheListeners<K, V>
         {
             buffer.append( "\n Event Queue Map " );
             buffer.append( "\n size = " + eventQMap.size() );
-            Iterator<Map.Entry<Long, ICacheEventQueue<K, V>>> it = eventQMap.entrySet().iterator();
-            while ( it.hasNext() )
-            {
-                buffer.append( "\n Entry: " + it.next() );
-            }
+            eventQMap.forEach((key, value)
+                    -> buffer.append( "\n Entry: key: ").append(key)
+                        .append(", value: ").append(value));
         }
         else
         {
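CacheListeners.toString() now appends entries via Map.forEach instead of walking the entry set with an Iterator. A standalone sketch of the same string-building pattern with illustrative map contents:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ForEachStringBuilderSketch
    {
        public static void main(String[] args)
        {
            Map<Long, String> eventQMap = new LinkedHashMap<>();
            eventQMap.put(1L, "queue-1");
            eventQMap.put(2L, "queue-2");

            StringBuilder buffer = new StringBuilder();
            buffer.append("\n Event Queue Map ");
            buffer.append("\n size = ").append(eventQMap.size());
            eventQMap.forEach((key, value)
                    -> buffer.append("\n Entry: key: ").append(key)
                        .append(", value: ").append(value));

            System.out.println(buffer);
        }
    }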
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java
index 82e02a1..cf49552 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/control/CompositeCache.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ScheduledExecutorService;
@@ -732,13 +731,8 @@ public class CompositeCache<K, V>
         throws IOException
     {
         Map<K, ICacheElement<K, V>> elementsFromMemory = memCache.getMultiple(keys);
-        Iterator<Map.Entry<K, ICacheElement<K, V>>> elementFromMemoryIterator = elementsFromMemory.entrySet().iterator();
-
-        while (elementFromMemoryIterator.hasNext())
-        {
-            Map.Entry<K, ICacheElement<K, V>> entry = elementFromMemoryIterator.next();
+        elementsFromMemory.entrySet().removeIf(entry -> {
             ICacheElement<K, V> element = entry.getValue();
-
             if (isExpired(element))
             {
                 if (log.isDebugEnabled())
@@ -747,7 +741,7 @@ public class CompositeCache<K, V>
                 }
 
                 doExpires(element);
-                elementFromMemoryIterator.remove();
+                return true;
             }
             else
             {
@@ -758,8 +752,9 @@ public class CompositeCache<K, V>
 
                 // Update counters
                 hitCountRam.incrementAndGet();
+                return false;
             }
-        }
+        });
 
         return elementsFromMemory;
     }
@@ -982,11 +977,7 @@ public class CompositeCache<K, V>
     private void processRetrievedElements(AuxiliaryCache<K, V> aux, Map<K, ICacheElement<K, V>> elementsFromAuxiliary)
         throws IOException
     {
-        Iterator<Map.Entry<K, ICacheElement<K, V>>> elementFromAuxiliaryIterator = elementsFromAuxiliary.entrySet().iterator();
-
-        while (elementFromAuxiliaryIterator.hasNext())
-        {
-            Map.Entry<K, ICacheElement<K, V>> entry = elementFromAuxiliaryIterator.next();
+        elementsFromAuxiliary.entrySet().removeIf(entry -> {
             ICacheElement<K, V> element = entry.getValue();
 
             // Item found in one of the auxiliary caches.
@@ -1004,7 +995,7 @@ public class CompositeCache<K, V>
                     // associated with the item when it created govern its behavior
                     // everywhere.
                     doExpires(element);
-                    elementFromAuxiliaryIterator.remove();
+                    return true;
                 }
                 else
                 {
@@ -1015,10 +1006,20 @@ public class CompositeCache<K, V>
 
                     // Update counters
                     hitCountAux.incrementAndGet();
-                    copyAuxiliaryRetrievedItemToMemory(element);
+                    try
+                    {
+                        copyAuxiliaryRetrievedItemToMemory(element);
+                    }
+                    catch (IOException e)
+                    {
+                        log.error(cacheAttr.getCacheName()
+                                + " failed to copy element to memory " + element, e);
+                    }
                 }
             }
-        }
+
+            return false;
+        });
     }
 
     /**
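Both CompositeCache loops become removeIf predicates with side effects on each branch: expired elements trigger doExpires() and are dropped (return true), hits bump a counter and are kept (return false); the auxiliary path also has to catch the IOException from copyAuxiliaryRetrievedItemToMemory() inside the lambda. A standalone sketch of the general shape, with Element and its expired flag as made-up stand-ins for ICacheElement and isExpired():

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.atomic.AtomicInteger;

    public class RemoveIfSideEffectsSketch
    {
        static class Element
        {
            final boolean expired;
            Element(boolean expired) { this.expired = expired; }
        }

        public static void main(String[] args)
        {
            Map<String, Element> elements = new HashMap<>();
            elements.put("fresh", new Element(false));
            elements.put("stale", new Element(true));

            AtomicInteger hitCount = new AtomicInteger();

            elements.entrySet().removeIf(entry -> {
                if (entry.getValue().expired)
                {
                    // side effect on the "expired" branch, then drop the entry
                    System.out.println("expiring " + entry.getKey());
                    return true;
                }
                // side effect on the "hit" branch, keep the entry
                hitCount.incrementAndGet();
                return false;
            });

            System.out.println(elements.keySet() + " hits=" + hitCount); // [fresh] hits=1
        }
    }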
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractMemoryCache.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractMemoryCache.java
index 4ec01d8..0bc69d4 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractMemoryCache.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/AbstractMemoryCache.java
@@ -22,7 +22,6 @@ package org.apache.commons.jcs.engine.memory;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Set;
@@ -36,6 +35,7 @@ import org.apache.commons.jcs.engine.behavior.ICacheElement;
 import org.apache.commons.jcs.engine.behavior.ICompositeCacheAttributes;
 import org.apache.commons.jcs.engine.control.CompositeCache;
 import org.apache.commons.jcs.engine.control.group.GroupAttrName;
+import org.apache.commons.jcs.engine.control.group.GroupId;
 import org.apache.commons.jcs.engine.memory.behavior.IMemoryCache;
 import org.apache.commons.jcs.engine.memory.util.MemoryElementDescriptor;
 import org.apache.commons.jcs.engine.stats.StatElement;
@@ -334,31 +334,28 @@ public abstract class AbstractMemoryCache<K, V>
      */
     protected boolean removeByGroup(K key)
     {
-        boolean removed = false;
+        GroupId groupId = ((GroupAttrName<?>) key).groupId;
 
         // remove all keys of the same group hierarchy.
-        for (Iterator<Map.Entry<K, MemoryElementDescriptor<K, V>>> itr = map.entrySet().iterator(); itr.hasNext();)
-        {
-            Map.Entry<K, MemoryElementDescriptor<K, V>> entry = itr.next();
+        return map.entrySet().removeIf(entry -> {
             K k = entry.getKey();
 
-            if (k instanceof GroupAttrName && ((GroupAttrName<?>) k).groupId.equals(((GroupAttrName<?>) key).groupId))
+            if (k instanceof GroupAttrName && ((GroupAttrName<?>) k).groupId.equals(groupId))
             {
                 lock.lock();
                 try
                 {
-                    itr.remove();
                     lockedRemoveElement(entry.getValue());
-                    removed = true;
+                    return true;
                 }
                 finally
                 {
                     lock.unlock();
                 }
             }
-        }
 
-        return removed;
+            return false;
+        });
     }
 
     /**
@@ -369,31 +366,28 @@ public abstract class AbstractMemoryCache<K, V>
      */
     protected boolean removeByHierarchy(K key)
     {
-        boolean removed = false;
+        String keyString = key.toString();
 
         // remove all keys of the same name hierarchy.
-        for (Iterator<Map.Entry<K, MemoryElementDescriptor<K, V>>> itr = map.entrySet().iterator(); itr.hasNext();)
-        {
-            Map.Entry<K, MemoryElementDescriptor<K, V>> entry = itr.next();
+        return map.entrySet().removeIf(entry -> {
             K k = entry.getKey();
 
-            if (k instanceof String && ((String) k).startsWith(key.toString()))
+            if (k instanceof String && ((String) k).startsWith(keyString))
             {
                 lock.lock();
                 try
                 {
-                    itr.remove();
                     lockedRemoveElement(entry.getValue());
-                    removed = true;
+                    return true;
                 }
                 finally
                 {
                     lock.unlock();
                 }
             }
-        }
 
-        return removed;
+            return false;
+        });
     }
 
     /**
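In AbstractMemoryCache the group id and key string are computed once before the predicate (they are loop-invariant, and the lambda can only capture effectively final locals), and the per-entry removal still runs under the lock. A standalone sketch of removeByHierarchy() after the change, with the removal side effect reduced to a print and illustrative map contents:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.locks.Lock;
    import java.util.concurrent.locks.ReentrantLock;

    public class RemoveByPrefixSketch
    {
        private static final Lock lock = new ReentrantLock();

        public static boolean removeByPrefix(Map<Object, String> map, Object key)
        {
            String keyString = key.toString(); // hoisted: computed once, not per entry

            return map.entrySet().removeIf(entry -> {
                Object k = entry.getKey();
                if (k instanceof String && ((String) k).startsWith(keyString))
                {
                    lock.lock();
                    try
                    {
                        System.out.println("removing " + k); // stands in for lockedRemoveElement()
                        return true;
                    }
                    finally
                    {
                        lock.unlock();
                    }
                }
                return false;
            });
        }

        public static void main(String[] args)
        {
            Map<Object, String> map = new HashMap<>();
            map.put("group:a", "1");
            map.put("group:b", "2");
            map.put("other", "3");
            System.out.println(removeByPrefix(map, "group:")); // true (something was removed)
            System.out.println(map.keySet());                  // [other]
        }
    }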
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/lru/LHMLRUMemoryCache.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/lru/LHMLRUMemoryCache.java
index 2679fb7..2f90285 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/lru/LHMLRUMemoryCache.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/memory/lru/LHMLRUMemoryCache.java
@@ -189,7 +189,6 @@ public class LHMLRUMemoryCache<K, V>
             }
             else
             {
-
                 if ( log.isDebugEnabled() )
                 {
                     log.debug( "LHMLRU max size: " + getCacheAttributes().getMaxObjects()
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java
index c2f5f2e..c069a6c 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java
@@ -1,31 +1,9 @@
 package org.apache.commons.jcs.utils.struct;
 
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.Serializable;
 import java.util.AbstractMap;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Iterator;
 import java.util.Map;
-import java.util.NoSuchElementException;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.locks.Lock;
@@ -461,33 +439,22 @@ public abstract class AbstractLRUMap<K, V>
         }
 
         log.debug( "verifycache: checking via keysets!" );
-        for (Iterator<K> itr2 = map.keySet().iterator(); itr2.hasNext(); )
-        {
-            found = false;
-            Serializable val = null;
-            try
-            {
-                val = (Serializable) itr2.next();
-            }
-            catch ( NoSuchElementException nse )
-            {
-                log.error( "verifycache: no such element exception" );
-                continue;
-            }
+        map.forEach((key, value) -> {
+            boolean _found = false;
 
             for (LRUElementDescriptor<K, V> li2 = list.getFirst(); li2 != null; li2 = (LRUElementDescriptor<K, V>) li2.next )
             {
-                if ( val.equals( li2.getKey() ) )
+                if ( key.equals( li2.getKey() ) )
                 {
-                    found = true;
+                    _found = true;
                     break;
                 }
             }
-            if ( !found )
+            if ( !_found )
             {
-                log.error( "verifycache: key not found in list : " + val );
+                log.error( "verifycache: key not found in list : " + key );
                 dumpCacheEntries();
-                if ( map.containsKey( val ) )
+                if ( map.containsKey( key ) )
                 {
                     log.error( "verifycache: map contains key" );
                 }
@@ -496,7 +463,7 @@ public abstract class AbstractLRUMap<K, V>
                     log.error( "verifycache: map does NOT contain key, what the HECK!" );
                 }
             }
-        }
+        });
     }
 
     /**
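Finally, AbstractLRUMap.verifycache() switches from a keySet Iterator (with its NoSuchElementException guard) to map.forEach; the method-level found flag becomes a per-entry local _found because a lambda cannot reassign a variable from the enclosing method. A standalone sketch of that shape, with a plain List standing in for the LRU descriptor list and illustrative map contents:

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class ForEachLocalFlagSketch
    {
        public static void main(String[] args)
        {
            Map<String, Integer> map = new LinkedHashMap<>();
            map.put("a", 1);
            map.put("missing", 2);

            List<String> list = Arrays.asList("a", "b"); // keys actually present in the "LRU list"

            map.forEach((key, value) -> {
                boolean found = false; // local per entry; reassigning an outer local would not compile
                for (String li : list)
                {
                    if (key.equals(li))
                    {
                        found = true;
                        break;
                    }
                }
                if (!found)
                {
                    System.out.println("verifycache: key not found in list : " + key);
                }
            });
        }
    }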