Posted to commits@commons.apache.org by tv...@apache.org on 2019/06/03 15:21:25 UTC

[commons-jcs] branch master updated: Fix warnings, reduce memory footprint

This is an automated email from the ASF dual-hosted git repository.

tv pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-jcs.git


The following commit(s) were added to refs/heads/master by this push:
     new 786bfd8  Fix warnings, reduce memory footprint
786bfd8 is described below

commit 786bfd811ff1e030c2c22175093a0ce19b050bf1
Author: Thomas Vandahl <tv...@apache.org>
AuthorDate: Mon Jun 3 17:21:20 2019 +0200

    Fix warnings, reduce memory footprint
---
 .../org/apache/commons/jcs/admin/JCSAdminBean.java |   7 +-
 .../jcs/auxiliary/disk/block/BlockDisk.java        | 146 +++++++++++----------
 .../jcs/auxiliary/disk/indexed/IndexedDisk.java    |  97 +++++++-------
 .../auxiliary/disk/indexed/IndexedDiskCache.java   |  14 +-
 .../jcs/engine/behavior/IElementSerializer.java    |   3 +-
 .../utils/serialization/CompressingSerializer.java |   3 +-
 .../utils/serialization/StandardSerializer.java    |  16 +--
 .../commons/jcs/utils/struct/AbstractLRUMap.java   |   8 +-
 .../disk/block/BlockDiskCacheUnitTestAbstract.java |   1 +
 .../auxiliary/disk/block/BlockDiskUnitTest.java    |  98 +++++++-------
 .../jcs/utils/discovery/UDPDiscoveryUnitTest.java  |   1 +
 11 files changed, 192 insertions(+), 202 deletions(-)

diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/admin/JCSAdminBean.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/admin/JCSAdminBean.java
index 10fddd9..550fd74 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/admin/JCSAdminBean.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/admin/JCSAdminBean.java
@@ -252,15 +252,10 @@ public class JCSAdminBean implements JCSJMXBean
                 }
 
                 // 4 bytes lost for the serialization header
-                size = size + counter.getCount() - 4;
+                size += counter.getCount() - 4;
             }
         }
 
-        if (size > Long.MAX_VALUE)
-        {
-            throw new IllegalStateException("The size of cache " + cache.getCacheName() + " (" + size + " bytes) is too large to be represented as an long integer.");
-        }
-
         return size;
     }
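
The guard removed above could never trigger: a value compared against Long.MAX_VALUE in a long comparison can never exceed it, so the check was unreachable dead code of exactly the kind the compiler warns about, and the accumulation is tightened to a compound assignment. For illustration only (not part of this commit), if overflow of the running total were ever a real concern, Math.addExact reports it as an exception instead of silently wrapping:

    // Hypothetical overflow-aware accumulation, mirroring the loop above.
    long size = 0;
    long[] elementSizes = { 1_024L, 2_048L, 4_096L }; // hypothetical per-element byte counts
    for (long elementSize : elementSizes)
    {
        // 4 bytes lost for the serialization header, as in the code above
        size = Math.addExact(size, elementSize - 4);  // throws ArithmeticException on overflow
    }
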
 
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java
index aaf3bea..1781299 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDisk.java
@@ -19,13 +19,11 @@ package org.apache.commons.jcs.auxiliary.disk.block;
  * under the License.
  */
 
-import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
+import java.nio.file.StandardOpenOption;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -41,10 +39,10 @@ import org.apache.commons.logging.LogFactory;
  * <p>
  * @author Aaron Smuts
  */
-public class BlockDisk
+public class BlockDisk implements AutoCloseable
 {
     /** The logger */
-    private static final Log log = LogFactory.getLog( BlockDisk.class );
+    private static final Log log = LogFactory.getLog(BlockDisk.class);
 
     /** The size of the header that indicates the amount of data stored in an occupied block. */
     public static final byte HEADER_SIZE_BYTES = 4;
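
Since BlockDisk now implements AutoCloseable (and close() is made public further down in this diff), the underlying FileChannel can be managed with try-with-resources. A minimal sketch, assuming the (File, IElementSerializer) constructor shown below; write() and read() are protected, so this would live in the same package (as the unit tests do) or in a subclass:

    import java.io.File;
    import java.io.IOException;
    import org.apache.commons.jcs.utils.serialization.StandardSerializer;

    static String roundTrip(File file, String value) throws IOException, ClassNotFoundException
    {
        try (BlockDisk blockDisk = new BlockDisk(file, new StandardSerializer()))
        {
            int[] blocks = blockDisk.write(value); // may span several blocks
            return blockDisk.read(blocks);         // chunks reassembled and deserialized
        }                                          // close() releases the FileChannel
    }
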
@@ -87,10 +85,10 @@ public class BlockDisk
      * @param elementSerializer
      * @throws IOException
      */
-    public BlockDisk( File file, IElementSerializer elementSerializer )
+    public BlockDisk(File file, IElementSerializer elementSerializer)
         throws IOException
     {
-        this( file, DEFAULT_BLOCK_SIZE_BYTES, elementSerializer );
+        this(file, DEFAULT_BLOCK_SIZE_BYTES, elementSerializer);
     }
 
     /**
@@ -100,10 +98,10 @@ public class BlockDisk
      * @param blockSizeBytes
      * @throws IOException
      */
-    public BlockDisk( File file, int blockSizeBytes )
+    public BlockDisk(File file, int blockSizeBytes)
         throws IOException
     {
-        this( file, blockSizeBytes, new StandardSerializer() );
+        this(file, blockSizeBytes, new StandardSerializer());
     }
 
     /**
@@ -114,17 +112,19 @@ public class BlockDisk
      * @param elementSerializer
      * @throws IOException
      */
-    public BlockDisk( File file, int blockSizeBytes, IElementSerializer elementSerializer )
+    public BlockDisk(File file, int blockSizeBytes, IElementSerializer elementSerializer)
         throws IOException
     {
         this.filepath = file.getAbsolutePath();
-        RandomAccessFile raf = new RandomAccessFile( filepath, "rw" );
-        this.fc = raf.getChannel();
+        this.fc = FileChannel.open(file.toPath(), 
+                StandardOpenOption.CREATE, 
+                StandardOpenOption.READ, 
+                StandardOpenOption.WRITE);
         this.numberOfBlocks.set((int) Math.ceil(1f * this.fc.size() / blockSizeBytes));
 
-        if ( log.isInfoEnabled() )
+        if (log.isInfoEnabled())
         {
-            log.info( "Constructing BlockDisk, blockSizeBytes [" + blockSizeBytes + "]" );
+            log.info("Constructing BlockDisk, blockSizeBytes [" + blockSizeBytes + "]");
         }
 
         this.blockSizeBytes = blockSizeBytes;
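
Opening the channel with FileChannel.open() drops the intermediate RandomAccessFile, a local that was never closed directly and therefore showed up as a potential-resource-leak warning (closing the channel releases the descriptor either way). CREATE, READ and WRITE together reproduce the old "rw" mode: the file is created if missing and opened for reading and writing. A stand-alone sketch of the same pattern, using a hypothetical file name:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.FileChannel;
    import java.nio.file.Paths;
    import java.nio.file.StandardOpenOption;

    static void demo() throws IOException
    {
        try (FileChannel fc = FileChannel.open(Paths.get("example.data"),
                StandardOpenOption.CREATE,
                StandardOpenOption.READ,
                StandardOpenOption.WRITE))
        {
            fc.write(ByteBuffer.wrap(new byte[] { 1, 2, 3 }), 0L); // absolute-position write
            System.out.println("file size = " + fc.size());
        }
    }
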
@@ -173,15 +173,15 @@ public class BlockDisk
      * @return the blocks we used.
      * @throws IOException
      */
-    protected int[] write( Serializable object )
+    protected <T> int[] write(T object)
         throws IOException
     {
         // serialize the object
         byte[] data = elementSerializer.serialize(object);
 
-        if ( log.isDebugEnabled() )
+        if (log.isDebugEnabled())
         {
-            log.debug( "write, total pre-chunking data.length = " + data.length );
+            log.debug("write, total pre-chunking data.length = " + data.length);
         }
 
         this.putBytes.addAndGet(data.length);
@@ -190,9 +190,9 @@ public class BlockDisk
         // figure out how many blocks we need.
         int numBlocksNeeded = calculateTheNumberOfBlocksNeeded(data);
 
-        if ( log.isDebugEnabled() )
+        if (log.isDebugEnabled())
         {
-            log.debug( "numBlocksNeeded = " + numBlocksNeeded );
+            log.debug("numBlocksNeeded = " + numBlocksNeeded);
         }
 
         // allocate blocks
@@ -201,23 +201,25 @@ public class BlockDisk
         int offset = 0;
         final int maxChunkSize = blockSizeBytes - HEADER_SIZE_BYTES;
         ByteBuffer headerBuffer = ByteBuffer.allocate(HEADER_SIZE_BYTES);
+        ByteBuffer dataBuffer = ByteBuffer.wrap(data);
 
         for (int i = 0; i < numBlocksNeeded; i++)
         {
             headerBuffer.clear();
             int length = Math.min(maxChunkSize, data.length - offset);
             headerBuffer.putInt(length);
+            headerBuffer.flip();
 
-            ByteBuffer dataBuffer = ByteBuffer.wrap(data, offset, length);
+            dataBuffer.position(offset).limit(offset + length);
+            ByteBuffer slice = dataBuffer.slice();
 
             long position = calculateByteOffsetForBlockAsLong(blocks[i]);
             // write the header
-            headerBuffer.flip();
             int written = fc.write(headerBuffer, position);
             assert written == HEADER_SIZE_BYTES;
 
-            //write the data
-            written = fc.write(dataBuffer, position + HEADER_SIZE_BYTES);
+            //write the data 
+            written = fc.write(slice, position + HEADER_SIZE_BYTES);
             assert written == length;
 
             offset += length;
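
Each block is now written from a zero-copy view of the payload: the byte array is wrapped once outside the loop, position and limit mark the window for the current block, and slice() yields a buffer over just that window, so no bytes are copied per block. A small sketch of the windowing on its own, assuming a 4-byte per-block header as in BlockDisk:

    import java.nio.ByteBuffer;

    static void sliceIntoBlocks(byte[] data)
    {
        int maxChunkSize = 512 - 4;                  // block size minus header (assumed values)
        ByteBuffer dataBuffer = ByteBuffer.wrap(data);
        for (int offset = 0; offset < data.length; offset += maxChunkSize)
        {
            int length = Math.min(maxChunkSize, data.length - offset);
            dataBuffer.position(offset).limit(offset + length);
            ByteBuffer slice = dataBuffer.slice();   // view of the current window, no copy
            // each slice would be passed to fc.write(slice, blockPosition + HEADER_SIZE_BYTES)
        }
    }
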
@@ -235,11 +237,11 @@ public class BlockDisk
      * @param numBlocksNeeded
      * @return byte[][]
      */
-    protected byte[][] getBlockChunks( byte[] complete, int numBlocksNeeded )
+    protected byte[][] getBlockChunks(byte[] complete, int numBlocksNeeded)
     {
         byte[][] chunks = new byte[numBlocksNeeded][];
 
-        if ( numBlocksNeeded == 1 )
+        if (numBlocksNeeded == 1)
         {
             chunks[0] = complete;
         }
@@ -248,15 +250,15 @@ public class BlockDisk
             int maxChunkSize = this.blockSizeBytes - HEADER_SIZE_BYTES;
             int totalBytes = complete.length;
             int totalUsed = 0;
-            for ( short i = 0; i < numBlocksNeeded; i++ )
+            for (short i = 0; i < numBlocksNeeded; i++)
             {
                 // use the max that can be written to a block or whatever is left in the original
                 // array
-                int chunkSize = Math.min( maxChunkSize, totalBytes - totalUsed );
+                int chunkSize = Math.min(maxChunkSize, totalBytes - totalUsed);
                 byte[] chunk = new byte[chunkSize];
                 // copy from the used position to the chunk size on the complete array to the chunk
                 // array.
-                System.arraycopy( complete, totalUsed, chunk, 0, chunkSize );
+                System.arraycopy(complete, totalUsed, chunk, 0, chunkSize);
                 chunks[i] = chunk;
                 totalUsed += chunkSize;
             }
@@ -269,39 +271,38 @@ public class BlockDisk
      * Reads an object that is located in the specified blocks.
      * <p>
      * @param blockNumbers
-     * @return Serializable
+     * @return the object instance
      * @throws IOException
      * @throws ClassNotFoundException
      */
-    protected <T extends Serializable> T read( int[] blockNumbers )
+    protected <T> T read(int[] blockNumbers)
         throws IOException, ClassNotFoundException
     {
-        byte[] data = null;
+        ByteBuffer data = null;
 
-        if ( blockNumbers.length == 1 )
+        if (blockNumbers.length == 1)
         {
-            data = readBlock( blockNumbers[0] );
+            data = readBlock(blockNumbers[0]);
         }
         else
         {
-            ByteArrayOutputStream baos = new ByteArrayOutputStream(getBlockSizeBytes());
+            data = ByteBuffer.allocate(blockNumbers.length * getBlockSizeBytes());
             // get all the blocks into data
-            for ( short i = 0; i < blockNumbers.length; i++ )
+            for (short i = 0; i < blockNumbers.length; i++)
             {
-                byte[] chunk = readBlock( blockNumbers[i] );
-                baos.write(chunk);
+                ByteBuffer chunk = readBlock(blockNumbers[i]);
+                data.put(chunk);
             }
 
-            data = baos.toByteArray();
-            baos.close();
+            data.flip();
         }
 
-        if ( log.isDebugEnabled() )
+        if (log.isDebugEnabled())
         {
-            log.debug( "read, total post combination data.length = " + data.length );
+            log.debug("read, total post combination data.length = " + data.limit());
         }
 
-        return elementSerializer.deSerialize( data, null );
+        return elementSerializer.deSerialize(data.array(), null);
     }
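
The multi-block read path now gathers the chunks into one ByteBuffer sized up front instead of a ByteArrayOutputStream, which avoids the stream's internal array growth and the final toByteArray() copy; flip() then sets the limit to the number of bytes written and rewinds the position. Because deSerialize() reads a single object from the front of data.array(), the unused slack after limit() is never consumed. The pattern in isolation, with hypothetical chunk contents:

    import java.nio.ByteBuffer;
    import java.util.Arrays;
    import java.util.List;

    static ByteBuffer combine(List<ByteBuffer> chunks, int blockSizeBytes)
    {
        ByteBuffer data = ByteBuffer.allocate(chunks.size() * blockSizeBytes); // upper bound
        for (ByteBuffer chunk : chunks)
        {
            data.put(chunk);    // advances data's position by chunk.remaining()
        }
        data.flip();            // limit = bytes written, position = 0, ready to read
        return data;
    }

    // usage: combine(Arrays.asList(ByteBuffer.wrap(new byte[] { 1, 2, 3 })), 512)
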
 
     /**
@@ -314,7 +315,7 @@ public class BlockDisk
      * @param block
      * @throws IOException
      */
-    private byte[] readBlock( int block )
+    private ByteBuffer readBlock(int block)
         throws IOException
     {
         int datalen = 0;
@@ -323,8 +324,8 @@ public class BlockDisk
         boolean corrupted = false;
         long fileLength = fc.size();
 
-        long position = calculateByteOffsetForBlockAsLong( block );
-//        if ( position > fileLength )
+        long position = calculateByteOffsetForBlockAsLong(block);
+//        if (position > fileLength)
 //        {
 //            corrupted = true;
 //            message = "Record " + position + " starts past EOF.";
@@ -332,27 +333,27 @@ public class BlockDisk
 //        else
         {
             ByteBuffer datalength = ByteBuffer.allocate(HEADER_SIZE_BYTES);
-            fc.read(datalength, position);
+            fc.read(datalength, position); 
             datalength.flip();
             datalen = datalength.getInt();
-            if ( position + datalen > fileLength )
+            if (position + datalen > fileLength)
             {
                 corrupted = true;
                 message = "Record " + position + " exceeds file length.";
             }
         }
 
-        if ( corrupted )
+        if (corrupted)
         {
-            log.warn( "\n The file is corrupt: " + "\n " + message );
-            throw new IOException( "The File Is Corrupt, need to reset" );
+            log.warn("\n The file is corrupt: " + "\n " + message);
+            throw new IOException("The File Is Corrupt, need to reset");
         }
 
         ByteBuffer data = ByteBuffer.allocate(datalen);
         fc.read(data, position + HEADER_SIZE_BYTES);
         data.flip();
 
-        return data.array();
+        return data;
     }
 
     /**
@@ -360,13 +361,13 @@ public class BlockDisk
      * <p>
      * @param blocksToFree
      */
-    protected void freeBlocks( int[] blocksToFree )
+    protected void freeBlocks(int[] blocksToFree)
     {
-        if ( blocksToFree != null )
+        if (blocksToFree != null)
         {
-            for ( short i = 0; i < blocksToFree.length; i++ )
+            for (short i = 0; i < blocksToFree.length; i++)
             {
-                emptyBlocks.offer( Integer.valueOf( blocksToFree[i] ) );
+                emptyBlocks.offer(Integer.valueOf(blocksToFree[i]));
             }
         }
     }
@@ -378,7 +379,7 @@ public class BlockDisk
      * @return the byte offset for this block in the file as a long
      * @since 2.0
      */
-    protected long calculateByteOffsetForBlockAsLong( int block )
+    protected long calculateByteOffsetForBlockAsLong(int block)
     {
         return (long) block * blockSizeBytes;
     }
@@ -389,21 +390,21 @@ public class BlockDisk
      * @param data
      * @return the number of blocks needed to store the byte array
      */
-    protected int calculateTheNumberOfBlocksNeeded( byte[] data )
+    protected int calculateTheNumberOfBlocksNeeded(byte[] data)
     {
         int dataLength = data.length;
 
         int oneBlock = blockSizeBytes - HEADER_SIZE_BYTES;
 
         // takes care of 0 = HEADER_SIZE_BYTES + blockSizeBytes
-        if ( dataLength <= oneBlock )
+        if (dataLength <= oneBlock)
         {
             return 1;
         }
 
         int dividend = dataLength / oneBlock;
 
-        if ( dataLength % oneBlock != 0 )
+        if (dataLength % oneBlock != 0)
         {
             dividend++;
         }
@@ -427,9 +428,12 @@ public class BlockDisk
      * <p>
      * @throws IOException
      */
-    protected void close()
+    @Override
+    public void close()
         throws IOException
     {
+        this.numberOfBlocks.set(0);
+        this.emptyBlocks.clear();
         fc.close();
     }
 
@@ -470,7 +474,7 @@ public class BlockDisk
     {
         long count = this.putCount.get();
 
-        if (count == 0 )
+        if (count == 0)
         {
             return 0;
         }
@@ -494,19 +498,19 @@ public class BlockDisk
     public String toString()
     {
         StringBuilder buf = new StringBuilder();
-        buf.append( "\nBlock Disk " );
-        buf.append( "\n  Filepath [" + filepath + "]" );
-        buf.append( "\n  NumberOfBlocks [" + this.numberOfBlocks.get() + "]" );
-        buf.append( "\n  BlockSizeBytes [" + this.blockSizeBytes + "]" );
-        buf.append( "\n  Put Bytes [" + this.putBytes + "]" );
-        buf.append( "\n  Put Count [" + this.putCount + "]" );
-        buf.append( "\n  Average Size [" + getAveragePutSizeBytes() + "]" );
-        buf.append( "\n  Empty Blocks [" + this.getEmptyBlocks() + "]" );
+        buf.append("\nBlock Disk ");
+        buf.append("\n  Filepath [" + filepath + "]");
+        buf.append("\n  NumberOfBlocks [" + this.numberOfBlocks.get() + "]");
+        buf.append("\n  BlockSizeBytes [" + this.blockSizeBytes + "]");
+        buf.append("\n  Put Bytes [" + this.putBytes + "]");
+        buf.append("\n  Put Count [" + this.putCount + "]");
+        buf.append("\n  Average Size [" + getAveragePutSizeBytes() + "]");
+        buf.append("\n  Empty Blocks [" + this.getEmptyBlocks() + "]");
         try
         {
-            buf.append( "\n  Length [" + length() + "]" );
+            buf.append("\n  Length [" + length() + "]");
         }
-        catch ( IOException e )
+        catch (IOException e)
         {
             // swallow
         }
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDisk.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDisk.java
index ee6a3ad..4d59f5e 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDisk.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDisk.java
@@ -20,19 +20,17 @@ package org.apache.commons.jcs.auxiliary.disk.indexed;
  */
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
+import java.nio.file.StandardOpenOption;
 
 import org.apache.commons.jcs.engine.behavior.IElementSerializer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 /** Provides thread safe access to the underlying random access file. */
-class IndexedDisk
+public class IndexedDisk implements AutoCloseable
 {
     /** The size of the header that indicates the amount of data stored in an occupied block. */
     public static final byte HEADER_SIZE_BYTES = 4;
@@ -41,7 +39,7 @@ class IndexedDisk
     private final IElementSerializer elementSerializer;
 
     /** The logger */
-    private static final Log log = LogFactory.getLog( IndexedDisk.class );
+    private static final Log log = LogFactory.getLog(IndexedDisk.class);
 
     /** The path to the log directory. */
     private final String filepath;
@@ -54,15 +52,17 @@ class IndexedDisk
      * <p>
      * @param file
      * @param elementSerializer
-     * @throws FileNotFoundException
+     * @throws IOException
      */
-    public IndexedDisk( File file, IElementSerializer elementSerializer )
-        throws FileNotFoundException
+    public IndexedDisk(File file, IElementSerializer elementSerializer)
+        throws IOException
     {
         this.filepath = file.getAbsolutePath();
         this.elementSerializer = elementSerializer;
-        RandomAccessFile raf = new RandomAccessFile( filepath, "rw" );
-        this.fc = raf.getChannel();
+        this.fc = FileChannel.open(file.toPath(), 
+                StandardOpenOption.CREATE, 
+                StandardOpenOption.READ, 
+                StandardOpenOption.WRITE);
     }
 
     /**
@@ -76,13 +76,13 @@ class IndexedDisk
      * @throws IOException
      * @throws ClassNotFoundException
      */
-    protected <T extends Serializable> T readObject( IndexedDiskElementDescriptor ded )
+    protected <T> T readObject(IndexedDiskElementDescriptor ded)
         throws IOException, ClassNotFoundException
     {
         String message = null;
         boolean corrupted = false;
         long fileLength = fc.size();
-        if ( ded.pos > fileLength )
+        if (ded.pos > fileLength)
         {
             corrupted = true;
             message = "Record " + ded + " starts past EOF.";
@@ -93,29 +93,29 @@ class IndexedDisk
             fc.read(datalength, ded.pos);
             datalength.flip();
             int datalen = datalength.getInt();
-            if ( ded.len != datalen )
+            if (ded.len != datalen)
             {
                 corrupted = true;
                 message = "Record " + ded + " does not match data length on disk (" + datalen + ")";
             }
-            else if ( ded.pos + ded.len > fileLength )
+            else if (ded.pos + ded.len > fileLength)
             {
                 corrupted = true;
                 message = "Record " + ded + " exceeds file length.";
             }
         }
 
-        if ( corrupted )
+        if (corrupted)
         {
-            log.warn( "\n The file is corrupt: " + "\n " + message );
-            throw new IOException( "The File Is Corrupt, need to reset" );
+            log.warn("\n The file is corrupt: " + "\n " + message);
+            throw new IOException("The File Is Corrupt, need to reset");
         }
 
         ByteBuffer data = ByteBuffer.allocate(ded.len);
         fc.read(data, ded.pos + HEADER_SIZE_BYTES);
         data.flip();
 
-        return elementSerializer.deSerialize( data.array(), null );
+        return elementSerializer.deSerialize(data.array(), null);
     }
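
Relaxing the type parameter from <T extends Serializable> to <T> here (and on BlockDisk.read()/write() and on writeObject() below) means the disk layer no longer insists on java.io.Serializable at compile time; whether a value can be stored is decided entirely by the configured IElementSerializer. A sketch of a serializer that stores plain Strings as UTF-8 text, assuming serialize() and deSerialize() are the interface's only abstract methods:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.jcs.engine.behavior.IElementSerializer;

    // Sketch only: suitable for String values, no Java serialization involved.
    public class Utf8StringSerializer implements IElementSerializer
    {
        @Override
        public <T> byte[] serialize(T obj) throws IOException
        {
            return String.valueOf(obj).getBytes(StandardCharsets.UTF_8);
        }

        @SuppressWarnings("unchecked")
        @Override
        public <T> T deSerialize(byte[] bytes, ClassLoader loader)
            throws IOException, ClassNotFoundException
        {
            return (T) new String(bytes, StandardCharsets.UTF_8);
        }
    }
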
 
     /**
@@ -125,7 +125,7 @@ class IndexedDisk
      * @param newPosition
      * @throws IOException
      */
-    protected void move( final IndexedDiskElementDescriptor ded, final long newPosition )
+    protected void move(final IndexedDiskElementDescriptor ded, final long newPosition)
         throws IOException
     {
         ByteBuffer datalength = ByteBuffer.allocate(HEADER_SIZE_BYTES);
@@ -133,9 +133,9 @@ class IndexedDisk
         datalength.flip();
         int length = datalength.getInt();
 
-        if ( length != ded.len )
+        if (length != ded.len)
         {
-            throw new IOException( "Mismatched memory and disk length (" + length + ") for " + ded );
+            throw new IOException("Mismatched memory and disk length (" + length + ") for " + ded);
         }
 
         // TODO: more checks?
@@ -147,10 +147,10 @@ class IndexedDisk
         int remaining = HEADER_SIZE_BYTES + length;
         ByteBuffer buffer = ByteBuffer.allocate(16384);
 
-        while ( remaining > 0 )
+        while (remaining > 0)
         {
             // chunk it
-            int chunkSize = Math.min( remaining, buffer.capacity() );
+            int chunkSize = Math.min(remaining, buffer.capacity());
             buffer.limit(chunkSize);
             fc.read(buffer, readPos);
             buffer.flip();
@@ -173,28 +173,32 @@ class IndexedDisk
      * @return true if we wrote successfully
      * @throws IOException
      */
-    protected boolean write( IndexedDiskElementDescriptor ded, byte[] data )
+    protected boolean write(IndexedDiskElementDescriptor ded, byte[] data)
         throws IOException
     {
         long pos = ded.pos;
-        if ( log.isTraceEnabled() )
+        if (log.isTraceEnabled())
         {
-            log.trace( "write> pos=" + pos );
-            log.trace( fc + " -- data.length = " + data.length );
+            log.trace("write> pos=" + pos);
+            log.trace(fc + " -- data.length = " + data.length);
         }
 
-        if ( data.length != ded.len )
+        if (data.length != ded.len)
         {
-            throw new IOException( "Mismatched descriptor and data lengths" );
+            throw new IOException("Mismatched descriptor and data lengths");
         }
 
-        ByteBuffer buffer = ByteBuffer.allocate(HEADER_SIZE_BYTES + data.length);
-        buffer.putInt(data.length);
-        buffer.put(data);
-        buffer.flip();
-        int written = fc.write(buffer, pos);
-        //fc.force(true);
+        ByteBuffer headerBuffer = ByteBuffer.allocate(HEADER_SIZE_BYTES);
+        headerBuffer.putInt(data.length);
+        // write the header
+        headerBuffer.flip();
+        int written = fc.write(headerBuffer, pos);
+        assert written == HEADER_SIZE_BYTES;
 
+        //write the data
+        ByteBuffer dataBuffer = ByteBuffer.wrap(data);
+        written = fc.write(dataBuffer, pos + HEADER_SIZE_BYTES);
+        
         return written == data.length;
     }
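
Writing the 4-byte length header and the payload as two absolute-position writes removes the per-call allocation of a combined HEADER_SIZE_BYTES + data.length buffer, which is where the memory-footprint saving comes from; FileChannel.write(ByteBuffer, long) writes at the given offset without moving the channel's position. With the split, the returned boolean only reflects the payload write; the header write is checked by the assert. The pattern on its own:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.FileChannel;

    // Sketch: a length-prefixed record written with two positional writes.
    static void writeRecord(FileChannel fc, long pos, byte[] data) throws IOException
    {
        ByteBuffer header = ByteBuffer.allocate(4);
        header.putInt(data.length);
        header.flip();                            // make the four header bytes readable
        fc.write(header, pos);                    // does not move the channel position
        fc.write(ByteBuffer.wrap(data), pos + 4); // payload immediately after the header
    }
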
 
@@ -202,17 +206,15 @@ class IndexedDisk
      * Serializes the object and write it out to the given position.
      * <p>
      * TODO: make this take a ded as well.
-     * @return true unless error
      * @param obj
      * @param pos
      * @throws IOException
      */
-    protected boolean writeObject( Serializable obj, long pos )
+    protected <T> void writeObject(T obj, long pos)
         throws IOException
     {
-        byte[] data = elementSerializer.serialize( obj );
-        write( new IndexedDiskElementDescriptor( pos, data.length ), data );
-        return true;
+        byte[] data = elementSerializer.serialize(obj);
+        write(new IndexedDiskElementDescriptor(pos, data.length), data);
     }
 
     /**
@@ -232,7 +234,8 @@ class IndexedDisk
      * <p>
      * @throws IOException
      */
-    protected void close()
+    @Override
+    public void close()
         throws IOException
     {
         fc.close();
@@ -246,9 +249,9 @@ class IndexedDisk
     protected synchronized void reset()
         throws IOException
     {
-        if ( log.isDebugEnabled() )
+        if (log.isDebugEnabled())
         {
-            log.debug( "Resetting Indexed File [" + filepath + "]" );
+            log.debug("Resetting Indexed File [" + filepath + "]");
         }
         fc.truncate(0);
         fc.force(true);
@@ -260,14 +263,14 @@ class IndexedDisk
      * @param length the new length of the file
      * @throws IOException
      */
-    protected void truncate( long length )
+    protected void truncate(long length)
         throws IOException
     {
-        if ( log.isInfoEnabled() )
+        if (log.isInfoEnabled())
         {
-            log.info( "Truncating file [" + filepath + "] to " + length );
+            log.info("Truncating file [" + filepath + "] to " + length);
         }
-        fc.truncate( length );
+        fc.truncate(length);
     }
 
     /**
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
index 9f2e3c8..24c1efc 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/auxiliary/disk/indexed/IndexedDiskCache.java
@@ -22,6 +22,7 @@ package org.apache.commons.jcs.auxiliary.disk.indexed;
 import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -940,23 +941,16 @@ public class IndexedDiskCache<K, V> extends AbstractDiskCache<K, V>
             {
                 dataFile.close();
             }
+            
             File dataFileTemp = new File(rafDir, fileName + ".data");
-            boolean result = dataFileTemp.delete();
-            if (!result && log.isDebugEnabled())
-            {
-                log.debug("Could not delete file " + dataFileTemp);
-            }
+            Files.delete(dataFileTemp.toPath());
 
             if (keyFile != null)
             {
                 keyFile.close();
             }
             File keyFileTemp = new File(rafDir, fileName + ".key");
-            result = keyFileTemp.delete();
-            if (!result && log.isDebugEnabled())
-            {
-                log.debug("Could not delete file " + keyFileTemp);
-            }
+            Files.delete(keyFileTemp.toPath());
 
             dataFile = new IndexedDisk(new File(rafDir, fileName + ".data"), getElementSerializer());
             keyFile = new IndexedDisk(new File(rafDir, fileName + ".key"), getElementSerializer());
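
Files.delete() reports a failed delete as an IOException (for example NoSuchFileException or DirectoryNotEmptyException) that says why it failed, whereas File.delete() only returns false, which the old code merely logged at debug level; failures during remove-all therefore now propagate instead of being silently ignored. Where a file may legitimately be absent, Files.deleteIfExists() keeps the tolerant behaviour without hiding real errors. A small sketch with a hypothetical path:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    static void cleanup() throws IOException
    {
        Path dataFile = Paths.get("target/example.data");     // hypothetical path
        try
        {
            Files.delete(dataFile);                // throws NoSuchFileException if absent
        }
        catch (IOException e)
        {
            // the exception explains exactly why the delete failed
        }
        boolean removed = Files.deleteIfExists(dataFile);      // false if it was not there
    }
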
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/behavior/IElementSerializer.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/behavior/IElementSerializer.java
index 547cc1e..2a74713 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/behavior/IElementSerializer.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/engine/behavior/IElementSerializer.java
@@ -40,7 +40,8 @@ public interface IElementSerializer
 
     /**
      * Turns a byte array into an object.
-     * @param bytes
+     * @param bytes data bytes
+     * @param loader class loader to use
      * @return Object
      * @throws IOException
      * @throws ClassNotFoundException thrown if we don't know the object.
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/CompressingSerializer.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/CompressingSerializer.java
index f28a778..bbc0ee9 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/CompressingSerializer.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/CompressingSerializer.java
@@ -48,7 +48,8 @@ public class CompressingSerializer extends StandardSerializer
      * Uses default de-serialization to turn a byte array into an object. Decompresses the value
      * first. All exceptions are converted into IOExceptions.
      * <p>
-     * @param data bytes of data
+     * @param data data bytes
+     * @param loader class loader to use
      * @return Object
      * @throws IOException on i/o problem
      * @throws ClassNotFoundException if class is not found during deserialization
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/StandardSerializer.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/StandardSerializer.java
index 7206b3f..0915800 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/StandardSerializer.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/serialization/StandardSerializer.java
@@ -44,14 +44,14 @@ public class StandardSerializer
      * @throws IOException
      */
     @Override
-    public <T> byte[] serialize( T obj )
+    public <T> byte[] serialize(T obj)
         throws IOException
     {
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
 
-        try (ObjectOutputStream oos = new ObjectOutputStream( baos ))
+        try (ObjectOutputStream oos = new ObjectOutputStream(baos))
         {
-            oos.writeObject( obj );
+            oos.writeObject(obj);
         }
 
         return baos.toByteArray();
@@ -61,18 +61,18 @@ public class StandardSerializer
      * Uses default de-serialization to turn a byte array into an object. All exceptions are
      * converted into IOExceptions.
      * <p>
-     * @param data
+     * @param data data bytes
+     * @param loader class loader to use
      * @return Object
      * @throws IOException
      * @throws ClassNotFoundException
      */
     @Override
-    public <T> T deSerialize( byte[] data, ClassLoader loader )
+    public <T> T deSerialize(byte[] data, ClassLoader loader)
         throws IOException, ClassNotFoundException
     {
-        ByteArrayInputStream bais = new ByteArrayInputStream( data );
-
-        try (ObjectInputStream ois = new ObjectInputStreamClassLoaderAware( bais, loader ))
+        try (ByteArrayInputStream bais = new ByteArrayInputStream(data);
+             ObjectInputStream ois = new ObjectInputStreamClassLoaderAware(bais, loader))
         {
             @SuppressWarnings("unchecked") // Need to cast from Object
             T readObject = (T) ois.readObject();
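
Declaring both streams in the try-with-resources header closes them in reverse order even when readObject() throws, and removes the unused local along with the resource warning (closing a ByteArrayInputStream is a no-op, so behaviour is unchanged). A round-trip usage sketch with the signatures shown above, passing null for the loader as the unit tests in this patch do:

    import org.apache.commons.jcs.utils.serialization.StandardSerializer;

    // Inside a method declaring throws IOException, ClassNotFoundException.
    StandardSerializer serializer = new StandardSerializer();
    byte[] bytes = serializer.serialize("hello");        // any Serializable value
    String copy = serializer.deSerialize(bytes, null);   // null loader, as in the tests
    assert "hello".equals(copy);
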
diff --git a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java
index f2ed5a2..94654a3 100644
--- a/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java
+++ b/commons-jcs-core/src/main/java/org/apache/commons/jcs/utils/struct/AbstractLRUMap.java
@@ -417,7 +417,6 @@ public abstract class AbstractLRUMap<K, V>
             return;
         }
 
-        boolean found = false;
         log.debug( "verifycache: mapContains " + map.size() +
                 " elements, linked list contains " + list.size() + " elements" );
         log.debug( "verifycache: checking linked list by key " );
@@ -459,17 +458,17 @@ public abstract class AbstractLRUMap<K, V>
 
         log.debug( "verifycache: checking via keysets!" );
         map.forEach((key, value) -> {
-            boolean _found = false;
+            boolean found = false;
 
             for (LRUElementDescriptor<K, V> li2 = list.getFirst(); li2 != null; li2 = (LRUElementDescriptor<K, V>) li2.next )
             {
                 if ( key.equals( li2.getKey() ) )
                 {
-                    _found = true;
+                    found = true;
                     break;
                 }
             }
-            if ( !_found )
+            if ( !found )
             {
                 log.error( "verifycache: key not found in list : " + key );
                 dumpCacheEntries();
@@ -559,5 +558,4 @@ public abstract class AbstractLRUMap<K, V>
                 .map(value -> value.getKey())
                 .collect(Collectors.toSet());
     }
-
 }
diff --git a/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheUnitTestAbstract.java b/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheUnitTestAbstract.java
index 5254dbc..45f816c 100644
--- a/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheUnitTestAbstract.java
+++ b/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskCacheUnitTestAbstract.java
@@ -145,6 +145,7 @@ public abstract class BlockDiskCacheUnitTestAbstract extends TestCase
         Serializable result = elementSerializer.deSerialize(resultData, null);
         // System.out.println( result );
         assertEquals("wrong string after retrieval", string, result);
+        blockDisk.close();
     }
 
     /**
diff --git a/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskUnitTest.java b/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskUnitTest.java
index f90326f..ceee8ca 100644
--- a/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskUnitTest.java
+++ b/commons-jcs-core/src/test/java/org/apache/commons/jcs/auxiliary/disk/block/BlockDiskUnitTest.java
@@ -23,6 +23,8 @@ import junit.framework.TestCase;
 import org.apache.commons.jcs.utils.serialization.StandardSerializer;
 
 import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
 import java.util.Random;
 
 /**
@@ -35,17 +37,45 @@ public class BlockDiskUnitTest
 {
     /** data file. */
     private File rafDir;
+    private BlockDisk disk;
 
     /**
+     * @see junit.framework.TestCase#setUp()
      * Creates the base directory
      */
-    public BlockDiskUnitTest()
+    @Override
+    protected void setUp() throws Exception
     {
+        super.setUp();
         String rootDirName = "target/test-sandbox/block";
         this.rafDir = new File( rootDirName );
         this.rafDir.mkdirs();
     }
 
+    private void setUpBlockDisk(String fileName) throws IOException
+    {
+        File file = new File(rafDir, fileName + ".data");
+        Files.delete(file.toPath());
+        this.disk = new BlockDisk(file, new StandardSerializer());
+    }
+    
+    private void setUpBlockDisk(String fileName, int blockSize) throws IOException
+    {
+        File file = new File(rafDir, fileName + ".data");
+        Files.delete(file.toPath());
+        this.disk = new BlockDisk(file, blockSize, new StandardSerializer());
+    }
+    
+    /**
+     * @see junit.framework.TestCase#tearDown()
+     */
+    @Override
+    protected void tearDown() throws Exception
+    {
+        disk.close();
+        super.tearDown();
+    }
+
     /**
      * Test writing a null object within a single block size.
      * <p>
@@ -55,10 +85,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWrite_NullBlockElement";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testWrite_NullBlockElement");
 
         // DO WORK
         int[] blocks = disk.write( null );
@@ -78,10 +105,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWrite_SingleBlockElement";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testWrite_SingleBlockElement");
 
         // DO WORK
         int bytes = 1 * 1024;
@@ -102,10 +126,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWriteAndRead_SingleBlockElement";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testWriteAndRead_SingleBlockElement");
 
         // DO WORK
         int bytes = 1 * 1024;
@@ -126,10 +147,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWrite_TwoSingleBlockElements";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testWrite_TwoSingleBlockElements");
 
         // DO WORK
         int bytes = 1 * 1024;
@@ -153,10 +171,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testCalculateBlocksNeededDouble";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testCalculateBlocksNeededDouble");
 
         // DO WORK
         int result = disk.calculateTheNumberOfBlocksNeeded( new byte[disk.getBlockSizeBytes() * 2
@@ -175,9 +190,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWriteDoubleBlockElement";
-        File file = new File( rafDir, fileName + ".data" );
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testWriteDoubleBlockElement");
 
         // DO WORK
         // byte arrays encur 27 bytes of serialization overhead.
@@ -201,9 +214,7 @@ public class BlockDiskUnitTest
         // SETUP
         int numBlocks = 128;
 
-        String fileName = "testWrite_128BlockElement";
-        File file = new File( rafDir, fileName + ".data" );
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testWrite_128BlockElement");
 
         // DO WORK
         // byte arrays encur 27 bytes of serialization overhead.
@@ -225,10 +236,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWriteAndReadSingleBlockElement";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        BlockDisk disk = new BlockDisk( file, new StandardSerializer() );
+        setUpBlockDisk("testWriteAndReadSingleBlockElement");
 
         // DO WORK
         int numBlocksPerElement = 4;
@@ -255,11 +263,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWriteAndReadSingleBlockElement";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        int blockSizeBytes = 1024;
-        BlockDisk disk = new BlockDisk( file, blockSizeBytes );
+        setUpBlockDisk("testWriteAndReadSingleBlockElement", 1024);
 
         // DO WORK
         int numBlocksPerElement = 4;
@@ -308,11 +312,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWriteAndRead_BigString";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        int blockSizeBytes = 4096;//1024;
-        BlockDisk disk = new BlockDisk( file, blockSizeBytes, new StandardSerializer() );
+        setUpBlockDisk("testWriteAndRead_BigString", 4096); //1024
 
         String string = "This is my big string ABCDEFGH";
         StringBuilder sb = new StringBuilder();
@@ -343,11 +343,7 @@ public class BlockDiskUnitTest
         throws Exception
     {
         // SETUP
-        String fileName = "testWriteAndRead_BigString";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        int blockSizeBytes = 47;//4096;//1024;
-        BlockDisk disk = new BlockDisk( file, blockSizeBytes, new StandardSerializer() );
+        setUpBlockDisk("testWriteAndRead_BigString", 47); //4096;//1024
 
         String string = "abcdefghijklmnopqrstuvwxyz1234567890";
         string += string;
@@ -361,16 +357,12 @@ public class BlockDiskUnitTest
         assertEquals( "Wrong item retured.", string, result );
     }
 
-    public void testJCS156() throws Exception {
+    public void testJCS156() throws Exception 
+    {
         // SETUP
-        String fileName = "testJCS156";
-        File file = new File( rafDir, fileName + ".data" );
-        file.delete();
-        int blockSizeBytes = 4096;
-        BlockDisk disk = new BlockDisk( file, blockSizeBytes, new StandardSerializer() );
+        setUpBlockDisk("testJCS156", 4096);
         long offset = disk.calculateByteOffsetForBlockAsLong(Integer.MAX_VALUE);
         assertTrue("Must not wrap round", offset > 0);
         assertEquals(Integer.MAX_VALUE*4096L,offset);
-        file.delete();
     }
 }
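
The setUpBlockDisk() helpers above delete any leftover ".data" file before creating a fresh BlockDisk, and tearDown() now closes the disk so the FileChannel is released between tests. One caveat: Files.delete() throws NoSuchFileException when the file does not exist yet, which can be the case on a clean target directory; if a missing file should not be an error, Files.deleteIfExists() handles both cases. A sketch of such a helper (the name is hypothetical):

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;
    import org.apache.commons.jcs.utils.serialization.StandardSerializer;

    private BlockDisk openFreshBlockDisk(File dir, String fileName) throws IOException
    {
        File file = new File(dir, fileName + ".data");
        Files.deleteIfExists(file.toPath()); // fine whether or not an earlier run left the file
        return new BlockDisk(file, new StandardSerializer());
    }
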
diff --git a/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/discovery/UDPDiscoveryUnitTest.java b/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/discovery/UDPDiscoveryUnitTest.java
index 32855c1..97af7bf 100644
--- a/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/discovery/UDPDiscoveryUnitTest.java
+++ b/commons-jcs-core/src/test/java/org/apache/commons/jcs/utils/discovery/UDPDiscoveryUnitTest.java
@@ -88,5 +88,6 @@ public class UDPDiscoveryUnitTest
         // request braodcasts change things.
         assertTrue( "Receiver count [" + receiver.getCnt() + "] should be the at least the number sent [" + cnt + "].",
                     cnt <= receiver.getCnt() );
+        sender.close();
     }
 }