Posted to commits@pirk.apache.org by ea...@apache.org on 2016/07/23 15:29:16 UTC

[4/4] incubator-pirk git commit: PIRK-11 Switch to SLF4J with Log4j2 bindings, including other minor cleanup - closes apache/incubator-pirk#20

PIRK-11 Switch to SLF4J with Log4j2 bindings, including other minor cleanup - closes apache/incubator-pirk#20


Project: http://git-wip-us.apache.org/repos/asf/incubator-pirk/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-pirk/commit/ef8d1c1a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-pirk/tree/ef8d1c1a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-pirk/diff/ef8d1c1a

Branch: refs/heads/master
Commit: ef8d1c1a56093d8da617652bef1fbe8a35ea6987
Parents: ec51044
Author: smarthi <sm...@apache.org>
Authored: Sat Jul 23 11:28:58 2016 -0400
Committer: eawilliams <ea...@apache.org>
Committed: Sat Jul 23 11:28:58 2016 -0400

----------------------------------------------------------------------
 pom.xml                                         |  14 +++
 .../pirk/benchmark/PaillierBenchmark.java       |  10 +-
 .../pirk/encryption/ModPowAbstraction.java      |   4 +-
 .../org/apache/pirk/encryption/Paillier.java    |  10 +-
 .../apache/pirk/encryption/PrimeGenerator.java  |  42 +++----
 .../inputformat/hadoop/BaseInputFormat.java     |   4 +-
 .../inputformat/hadoop/BytesArrayWritable.java  |  17 +--
 .../inputformat/hadoop/InputFormatConst.java    |   6 +-
 .../inputformat/hadoop/TextArrayWritable.java   |   4 +-
 .../hadoop/json/JSONInputFormatBase.java        |   6 +-
 .../hadoop/json/JSONRecordReader.java           |  25 ++--
 .../apache/pirk/querier/wideskies/Querier.java  |  34 ++----
 .../pirk/querier/wideskies/QuerierConst.java    |   4 +-
 .../pirk/querier/wideskies/QuerierDriver.java   |  18 +--
 .../querier/wideskies/QuerierDriverCLI.java     |  15 ++-
 .../wideskies/decrypt/DecryptResponse.java      |  26 ++--
 .../decrypt/DecryptResponseRunnable.java        |  26 ++--
 .../querier/wideskies/encrypt/EncryptQuery.java |  34 +++---
 .../wideskies/encrypt/EncryptQueryRunnable.java |  22 ++--
 .../wideskies/encrypt/ExpTableRunnable.java     |  22 ++--
 .../org/apache/pirk/query/wideskies/Query.java  |  48 ++++----
 .../apache/pirk/query/wideskies/QueryInfo.java  |  40 +++----
 .../apache/pirk/query/wideskies/QueryUtils.java |  29 ++---
 .../pirk/responder/wideskies/ResponderCLI.java  |  28 ++---
 .../responder/wideskies/ResponderDriver.java    |   4 +-
 .../wideskies/common/ComputeEncryptedRow.java   |  53 ++++-----
 .../common/HashSelectorAndPartitionData.java    |  23 ++--
 .../wideskies/mapreduce/ColumnMultMapper.java   |  14 +--
 .../wideskies/mapreduce/ColumnMultReducer.java  |  16 +--
 .../mapreduce/ComputeResponseTool.java          |  72 +++++------
 .../wideskies/mapreduce/ExpTableMapper.java     |  22 ++--
 .../wideskies/mapreduce/ExpTableReducer.java    |  14 +--
 .../mapreduce/FinalResponseReducer.java         |  24 ++--
 .../HashSelectorsAndPartitionDataMapper.java    |  26 ++--
 .../responder/wideskies/mapreduce/MRStats.java  |   8 +-
 .../wideskies/mapreduce/RowCalcReducer.java     |  35 +++---
 .../responder/wideskies/spark/Accumulators.java |  16 +--
 .../wideskies/spark/BroadcastVars.java          |  16 +--
 .../wideskies/spark/ComputeExpLookupTable.java  |  17 ++-
 .../wideskies/spark/ComputeResponse.java        |  58 +++++----
 .../spark/EncColMultGroupedMapper.java          |  20 ++--
 .../wideskies/spark/EncColMultReducer.java      |  17 +--
 .../responder/wideskies/spark/EncRowCalc.java   |  43 +++----
 .../spark/EncRowCalcPrecomputedCache.java       |  45 +++----
 .../wideskies/spark/ExpKeyFilenameMap.java      |  17 ++-
 .../wideskies/spark/ExpTableGenerator.java      |  25 ++--
 .../responder/wideskies/spark/FilterData.java   |  20 ++--
 .../spark/HashSelectorsAndPartitionData.java    |  27 ++---
 .../wideskies/standalone/Responder.java         |  29 +++--
 .../pirk/response/wideskies/Response.java       |  16 +--
 .../org/apache/pirk/schema/data/DataSchema.java |  30 ++---
 .../pirk/schema/data/LoadDataSchemas.java       |  32 ++---
 .../data/partitioner/DataPartitioner.java       |  16 +--
 .../data/partitioner/IPDataPartitioner.java     |  12 +-
 .../partitioner/ISO8601DatePartitioner.java     |   6 +-
 .../partitioner/PrimitiveTypePartitioner.java   |  12 +-
 .../pirk/schema/query/LoadQuerySchemas.java     |  26 ++--
 .../apache/pirk/schema/query/QuerySchema.java   |  22 ++--
 .../pirk/schema/query/filter/DataFilter.java    |   4 +-
 .../pirk/schema/query/filter/FilterFactory.java |  10 +-
 .../schema/query/filter/StopListFilter.java     |  14 +--
 .../pirk/schema/response/QueryResponseJSON.java |  22 ++--
 .../test/distributed/DistributedTestCLI.java    |  14 +--
 .../test/distributed/DistributedTestDriver.java |  10 +-
 .../distributed/testsuite/DistTestSuite.java    |  14 +--
 .../org/apache/pirk/test/utils/BaseTests.java   |  51 ++++----
 .../java/org/apache/pirk/test/utils/Inputs.java |  65 +++++-----
 .../apache/pirk/test/utils/StandaloneQuery.java |  20 ++--
 .../org/apache/pirk/test/utils/TestUtils.java   |  20 ++--
 .../org/apache/pirk/utils/CSVOutputUtils.java   |   9 +-
 .../org/apache/pirk/utils/EpochDateParser.java  |   9 +-
 .../java/org/apache/pirk/utils/FileConst.java   |   4 +-
 .../java/org/apache/pirk/utils/FileIOUtils.java |   9 +-
 src/main/java/org/apache/pirk/utils/HDFS.java   |  11 +-
 .../apache/pirk/utils/ISO8601DateParser.java    |  11 +-
 .../java/org/apache/pirk/utils/KeyedHash.java   |  13 +-
 .../java/org/apache/pirk/utils/LogUtils.java    | 119 -------------------
 .../org/apache/pirk/utils/PIRException.java     |   4 +-
 .../org/apache/pirk/utils/QueryParserUtils.java |  46 +++----
 .../org/apache/pirk/utils/StopListUtils.java    |   4 +-
 .../java/org/apache/pirk/utils/StringUtils.java |  17 ++-
 .../apache/pirk/utils/SystemConfiguration.java  |  29 ++---
 .../test/general/ISO8601DateParserTest.java     |  13 +-
 src/test/java/test/general/KeyedHashTest.java   |  13 +-
 src/test/java/test/general/PaillierTest.java    |  64 +++++-----
 .../java/test/general/PartitionUtilsTest.java   |  23 ++--
 .../java/test/general/QueryParserUtilsTest.java |  23 ++--
 .../test/schema/data/LoadDataSchemaTest.java    |  29 +++--
 .../test/schema/query/LoadQuerySchemaTest.java  |  41 +++----
 .../wideskies/standalone/StandaloneTest.java    |  12 +-
 90 files changed, 938 insertions(+), 1130 deletions(-)
----------------------------------------------------------------------
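
The recurring change in the hunks below is the replacement of the removed LogUtils helper with a direct SLF4J lookup. As a minimal before/after sketch of that pattern (the class name Foo is a placeholder, not a class from this commit):

    // Before: Log4j 1.x logger obtained via the now-deleted LogUtils helper
    //   import org.apache.log4j.Logger;
    //   private static Logger logger = LogUtils.getLoggerForThisClass();

    // After: SLF4J API, bound to Log4j 2 at runtime by log4j-slf4j-impl
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Foo
    {
      private static final Logger logger = LoggerFactory.getLogger(Foo.class);
    }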


http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 996c629..20db598 100644
--- a/pom.xml
+++ b/pom.xml
@@ -80,6 +80,7 @@
 		<jmh.version>1.11.3</jmh.version>
 		<benchmarkjar.name>benchmarks</benchmarkjar.name>
 		<javac.target>1.7</javac.target>
+		<slf4j.version>1.7.21</slf4j.version>
 	</properties>
 
 	<dependencies>
@@ -181,6 +182,19 @@
 			<version>${jmh.version}</version>
 		</dependency>
 
+		<!-- Sl4j modules -->
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>${slf4j.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>org.apache.logging.log4j</groupId>
+			<artifactId>log4j-slf4j-impl</artifactId>
+			<version>2.6.2</version>
+		</dependency>
+
 	</dependencies>
 
 	<build>
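
The log4j-slf4j-impl artifact routes SLF4J calls into Log4j 2, which in turn looks for a log4j2.xml on the classpath. This commit does not add a configuration file, so the following console-only log4j2.xml is only an illustrative sketch of what such a binding typically expects:

    <?xml version="1.0" encoding="UTF-8"?>
    <Configuration status="WARN">
      <Appenders>
        <Console name="Console" target="SYSTEM_OUT">
          <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
        </Console>
      </Appenders>
      <Loggers>
        <Root level="info">
          <AppenderRef ref="Console"/>
        </Root>
      </Loggers>
    </Configuration>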

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java b/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
index dd6b57f..3ffee5f 100644
--- a/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
+++ b/src/main/java/org/apache/pirk/benchmark/PaillierBenchmark.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,16 +15,14 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 
 package org.apache.pirk.benchmark;
 
 import java.math.BigInteger;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.ModPowAbstraction;
 import org.apache.pirk.encryption.Paillier;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.PIRException;
 import org.apache.pirk.utils.SystemConfiguration;
 import org.openjdk.jmh.annotations.Benchmark;
@@ -33,6 +31,8 @@ import org.openjdk.jmh.annotations.Mode;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A JMH benchmark to evaluate Paillier performance both with and without using com.square.jnagmp.gmp to accelerate modPow
@@ -43,7 +43,7 @@ import org.openjdk.jmh.annotations.State;
 public class PaillierBenchmark
 {
   private static final int MODULUS_SIZE = 3074;
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(PaillierBenchmark.class);
 
   @State(Scope.Benchmark)
   public static class PaillierBenchmarkState

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/encryption/ModPowAbstraction.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/encryption/ModPowAbstraction.java b/src/main/java/org/apache/pirk/encryption/ModPowAbstraction.java
index 350b3a6..4fd2dcd 100644
--- a/src/main/java/org/apache/pirk/encryption/ModPowAbstraction.java
+++ b/src/main/java/org/apache/pirk/encryption/ModPowAbstraction.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.encryption;
 
 import java.math.BigInteger;

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/encryption/Paillier.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/encryption/Paillier.java b/src/main/java/org/apache/pirk/encryption/Paillier.java
index 27b1774..b3d1a7a 100644
--- a/src/main/java/org/apache/pirk/encryption/Paillier.java
+++ b/src/main/java/org/apache/pirk/encryption/Paillier.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.encryption;
 
 import java.io.Serializable;
@@ -23,10 +23,10 @@ import java.math.BigInteger;
 import java.security.GeneralSecurityException;
 import java.security.SecureRandom;
 
-import org.apache.log4j.Logger;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.PIRException;
 import org.apache.pirk.utils.SystemConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implementation of the Paillier cryptosystem
@@ -69,7 +69,7 @@ public class Paillier implements Cloneable, Serializable
 {
   private static final long serialVersionUID = 1L;
 
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(Paillier.class);
 
   private static final SecureRandom secureRandom;
 

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java b/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java
index 01684aa..f529df0 100644
--- a/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java
+++ b/src/main/java/org/apache/pirk/encryption/PrimeGenerator.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.encryption;
 
 import java.math.BigDecimal;
@@ -23,9 +23,9 @@ import java.math.BigInteger;
 import java.util.HashMap;
 import java.util.Random;
 
-import org.apache.log4j.Logger;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.SystemConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class to generate the primes used in the Paillier cryptosystem
@@ -53,10 +53,10 @@ import org.apache.pirk.utils.SystemConfiguration;
  */
 public class PrimeGenerator
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(PrimeGenerator.class);
 
-  private static final HashMap<Integer,BigInteger> lowerBoundCache = new HashMap<Integer,BigInteger>();
-  private static final HashMap<Integer,BigInteger> minimumDifferenceCache = new HashMap<Integer,BigInteger>();
+  private static final HashMap<Integer,BigInteger> lowerBoundCache = new HashMap<>();
+  private static final HashMap<Integer,BigInteger> minimumDifferenceCache = new HashMap<>();
 
   private static boolean additionalChecksEnabled = SystemConfiguration.getProperty("pallier.FIPSPrimeGenerationChecks").equals("true");
 
@@ -69,7 +69,7 @@ public class PrimeGenerator
    */
   public static BigInteger getSinglePrime(int bitLength, int certainty, Random rnd)
   {
-    BigInteger p = null;
+    BigInteger p;
 
     logger.debug("bitLength " + bitLength + " certainty " + certainty + " random " + rnd);
 
@@ -85,15 +85,15 @@ public class PrimeGenerator
       int roundsLeft = calcNumAdditionalMillerRabinRounds(bitLength);
 
       // Calculate the lower bound (\sqrt(2))(2^(bitLength/2) – 1)) for use in FIPS 186-4 B.3.3, step 4.4
-      BigInteger lowerBound = null;
-      if (!lowerBoundCache.containsKey(Integer.valueOf(bitLength)))
+      BigInteger lowerBound;
+      if (!lowerBoundCache.containsKey(bitLength))
       {
         lowerBound = BigDecimal.valueOf(Math.sqrt(2)).multiply(BigDecimal.valueOf(2).pow((bitLength / 2) - 1)).toBigInteger();
-        lowerBoundCache.put(Integer.valueOf(bitLength), lowerBound);
+        lowerBoundCache.put(bitLength, lowerBound);
       }
       else
       {
-        lowerBound = lowerBoundCache.get(Integer.valueOf(bitLength));
+        lowerBound = lowerBoundCache.get(bitLength);
       }
 
       // Complete FIPS 186-4 B.3.3, steps 4.2 - 4.5
@@ -130,7 +130,7 @@ public class PrimeGenerator
    */
   public static BigInteger getSecondPrime(int bitLength, int certainty, Random rnd, BigInteger p)
   {
-    BigInteger q = null;
+    BigInteger q;
 
     logger.debug("bitLength " + bitLength + " certainty " + certainty + " random " + rnd);
 
@@ -146,27 +146,27 @@ public class PrimeGenerator
       int roundsLeft = calcNumAdditionalMillerRabinRounds(bitLength);
 
       // Calculate the lower bound (\sqrt(2))(2^(bitLength/2) – 1)) for use in FIPS 186-4 B.3.3, step 5.5
-      BigInteger lowerBound = null;
-      if (!lowerBoundCache.containsKey(Integer.valueOf(bitLength)))
+      BigInteger lowerBound;
+      if (!lowerBoundCache.containsKey(bitLength))
       {
         lowerBound = BigDecimal.valueOf(Math.sqrt(2)).multiply(BigDecimal.valueOf(2).pow((bitLength / 2) - 1)).toBigInteger();
-        lowerBoundCache.put(Integer.valueOf(bitLength), lowerBound);
+        lowerBoundCache.put(bitLength, lowerBound);
       }
       else
       {
-        lowerBound = lowerBoundCache.get(Integer.valueOf(bitLength));
+        lowerBound = lowerBoundCache.get(bitLength);
       }
 
       // Compute the minimumDifference 2^((bitLength/2) – 100) for use in FIPS 186-4 B.3.3, step 5.4
-      BigInteger minimumDifference = null;
-      if (!minimumDifferenceCache.containsKey(Integer.valueOf(bitLength)))
+      BigInteger minimumDifference;
+      if (!minimumDifferenceCache.containsKey(bitLength))
       {
         minimumDifference = BigDecimal.valueOf(2).pow(bitLength / 2 - 100).toBigInteger();
-        minimumDifferenceCache.put(Integer.valueOf(bitLength), minimumDifference);
+        minimumDifferenceCache.put(bitLength, minimumDifference);
       }
       else
       {
-        minimumDifference = minimumDifferenceCache.get(Integer.valueOf(bitLength));
+        minimumDifference = minimumDifferenceCache.get(bitLength);
       }
 
       // Complete FIPS 186-4 B.3.3, steps 5.2 - 5.6
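
The debug statements in this file keep string concatenation. SLF4J also supports parameterized messages, which skip the string building when the level is disabled; this is only a suggested idiom, not part of the commit:

    // Arguments are substituted into the {} placeholders only if DEBUG is enabled
    logger.debug("bitLength {} certainty {} random {}", bitLength, certainty, rnd);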

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java b/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java
index 7bd6c1e..1445432 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.inputformat.hadoop;
 
 import org.apache.hadoop.mapreduce.InputFormat;

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java b/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java
index 53aab08..48078df 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/BytesArrayWritable.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.inputformat.hadoop;
 
 import java.io.ByteArrayOutputStream;
@@ -26,8 +26,8 @@ import java.util.Arrays;
 
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.BytesWritable;
-import org.apache.log4j.Logger;
-import org.apache.pirk.utils.LogUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * ArrayWritable class with ByteWritable entries
@@ -35,7 +35,7 @@ import org.apache.pirk.utils.LogUtils;
  */
 public class BytesArrayWritable extends ArrayWritable
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(BytesArrayWritable.class);
 
   public BytesArrayWritable()
   {
@@ -147,16 +147,11 @@ public class BytesArrayWritable extends ArrayWritable
   // assumes big endian ordering -- will not change value
   private byte[] pad(byte[] bytes) throws IOException
   {
-    byte[] paddedBytes = null;
-
     byte[] zeroByte = new byte[] {0};
 
     ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
     outputStream.write(zeroByte);
     outputStream.write(bytes);
-
-    paddedBytes = outputStream.toByteArray();
-
-    return paddedBytes;
+    return outputStream.toByteArray();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java b/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
index d45d900..d904c53 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/InputFormatConst.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.inputformat.hadoop;
 
 import java.util.ArrayList;
@@ -30,5 +30,5 @@ public class InputFormatConst
 
   public static String ES = "elasticsearch";
 
-  public static ArrayList<String> ALLOWED_FORMATS = new ArrayList<String>(Arrays.asList(BASE_FORMAT, ES));
+  public static ArrayList<String> ALLOWED_FORMATS = new ArrayList<>(Arrays.asList(BASE_FORMAT, ES));
 }
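
Several hunks, including the one above, also switch explicit generic type arguments to the Java 7 diamond operator, which the pom's javac.target of 1.7 already supports; a minimal illustration:

    // Element type is inferred from the left-hand side declaration
    ArrayList<String> allowedFormats = new ArrayList<>(Arrays.asList(BASE_FORMAT, ES));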

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/inputformat/hadoop/TextArrayWritable.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/TextArrayWritable.java b/src/main/java/org/apache/pirk/inputformat/hadoop/TextArrayWritable.java
index 371f811..fe4625b 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/TextArrayWritable.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/TextArrayWritable.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.inputformat.hadoop;
 
 import java.math.BigInteger;

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONInputFormatBase.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONInputFormatBase.java b/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONInputFormatBase.java
index 67204e6..67e40c7 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONInputFormatBase.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONInputFormatBase.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.inputformat.hadoop.json;
 
 import java.io.IOException;
@@ -34,7 +34,7 @@ import org.apache.pirk.inputformat.hadoop.BaseInputFormat;
  */
 public class JSONInputFormatBase extends BaseInputFormat<Text,MapWritable>
 {
-  JSONInputFormat jsonInputFormat = new JSONInputFormat();
+  private JSONInputFormat jsonInputFormat = new JSONInputFormat();
 
   @Override
   public RecordReader<Text,MapWritable> createRecordReader(InputSplit arg0, TaskAttemptContext arg1) throws IOException, InterruptedException

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONRecordReader.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONRecordReader.java b/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONRecordReader.java
index ab92401..80fd64f 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONRecordReader.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/json/JSONRecordReader.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.inputformat.hadoop.json;
 
 import java.io.IOException;
@@ -28,17 +28,17 @@ import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;
-import org.apache.log4j.Logger;
 import org.apache.pirk.inputformat.hadoop.TextArrayWritable;
 import org.apache.pirk.schema.data.DataSchema;
 import org.apache.pirk.schema.data.LoadDataSchemas;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.QueryParserUtils;
 import org.apache.pirk.utils.StringUtils;
 import org.apache.pirk.utils.SystemConfiguration;
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Record reader to parse files of JSON string representations, one per line
@@ -46,14 +46,14 @@ import org.json.simple.parser.ParseException;
  */
 public class JSONRecordReader extends RecordReader<Text,MapWritable>
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(JSONRecordReader.class);
 
-  LineRecordReader lineReader = null;
-  Text key = null;
-  MapWritable value = null;
-  JSONParser jsonParser = null;
-  String queryString = null;
-  DataSchema dataSchema = null;
+  private LineRecordReader lineReader = null;
+  private Text key = null;
+  private MapWritable value = null;
+  private JSONParser jsonParser = null;
+  private String queryString = null;
+  private DataSchema dataSchema = null;
 
   @Override
   public void initialize(InputSplit inputSplit, TaskAttemptContext context) throws IOException
@@ -154,8 +154,7 @@ public class JSONRecordReader extends RecordReader<Text,MapWritable>
       toMapWritable(line);
 
       // Check to see if the record satisfies the query
-      boolean satisfiesQuery = QueryParserUtils.checkRecord(queryString, value, dataSchema);
-      return satisfiesQuery;
+      return QueryParserUtils.checkRecord(queryString, value, dataSchema);
 
     } catch (ParseException e)
     {

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
index 25b4b41..7ffc7a0 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,12 +15,11 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies;
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.ObjectInputStream;
@@ -29,11 +28,11 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.Paillier;
 import org.apache.pirk.query.wideskies.Query;
 import org.apache.pirk.query.wideskies.QueryInfo;
-import org.apache.pirk.utils.LogUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class to hold the information necessary for the PIR querier to perform decryption
@@ -43,20 +42,20 @@ public class Querier implements Serializable
 {
   private static final long serialVersionUID = 1L;
 
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(Querier.class);
 
-  QueryInfo queryInfo = null;
+  private QueryInfo queryInfo = null;
 
-  Query query = null; // contains the query vectors and functionality
+  private Query query = null; // contains the query vectors and functionality
 
-  Paillier paillier = null; // Paillier encryption functionality
+  private Paillier paillier = null; // Paillier encryption functionality
 
-  ArrayList<String> selectors = null; // selectors for the watchlist
+  private ArrayList<String> selectors = null; // selectors for the watchlist
 
   // map to check the embedded selectors in the results for false positives;
   // if the selector is a fixed size < 32 bits, it is included as is
   // if the selector is of variable lengths
-  HashMap<Integer,String> embedSelectorMap = null;
+  private HashMap<Integer,String> embedSelectorMap = null;
 
   public Querier(QueryInfo queryInfoInput, ArrayList<String> selectorsInput, Paillier paillierInput, Query pirQueryInput,
       HashMap<Integer,String> embedSelectorMapInput)
@@ -138,9 +137,8 @@ public class Querier implements Serializable
    */
   public static Querier readFromFile(String filename) throws IOException
   {
-    Querier querier = readFromFile(new File(filename));
 
-    return querier;
+    return readFromFile(new File(filename));
   }
 
   /**
@@ -151,19 +149,13 @@ public class Querier implements Serializable
     Querier querier = null;
 
     FileInputStream fIn = null;
-    ObjectInputStream oIn = null;
+    ObjectInputStream oIn;
     try
     {
       fIn = new FileInputStream(file);
       oIn = new ObjectInputStream(fIn);
       querier = (Querier) oIn.readObject();
-    } catch (FileNotFoundException e)
-    {
-      e.printStackTrace();
-    } catch (IOException e)
-    {
-      e.printStackTrace();
-    } catch (ClassNotFoundException e)
+    } catch (IOException | ClassNotFoundException e)
     {
       e.printStackTrace();
     } finally
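
The hunk above collapses three identical catch blocks into a Java 7 multi-catch while keeping the explicit finally. A try-with-resources variant, shown here only as a sketch of a possible further cleanup (not something this commit does), would close both streams automatically:

    public static Querier readFromFile(File file)
    {
      // try-with-resources closes oIn (and the wrapped FileInputStream) on exit
      try (ObjectInputStream oIn = new ObjectInputStream(new FileInputStream(file)))
      {
        return (Querier) oIn.readObject();
      } catch (IOException | ClassNotFoundException e)
      {
        e.printStackTrace();
        return null;
      }
    }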

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/QuerierConst.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierConst.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierConst.java
index eeab489..a034461 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierConst.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierConst.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriver.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriver.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriver.java
index 916489c..01a6c86 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriver.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriver.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies;
 
 import java.io.IOException;
@@ -23,7 +23,6 @@ import java.io.Serializable;
 import java.math.BigInteger;
 import java.util.ArrayList;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.Paillier;
 import org.apache.pirk.querier.wideskies.decrypt.DecryptResponse;
 import org.apache.pirk.querier.wideskies.encrypt.EncryptQuery;
@@ -31,9 +30,10 @@ import org.apache.pirk.query.wideskies.QueryInfo;
 import org.apache.pirk.response.wideskies.Response;
 import org.apache.pirk.schema.query.LoadQuerySchemas;
 import org.apache.pirk.utils.FileIOUtils;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.PIRException;
 import org.apache.pirk.utils.SystemConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Driver class for encryption of a query or decryption of a response
@@ -72,16 +72,16 @@ import org.apache.pirk.utils.SystemConfiguration;
 public class QuerierDriver implements Serializable
 {
   private static final long serialVersionUID = 1L;
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(QuerierDriver.class);
 
   public static void main(String... args) throws IOException, InterruptedException, PIRException
   {
     // General variables
-    String action = null;
-    String inputFile = null;
-    String outputFile = null;
+    String action;
+    String inputFile;
+    String outputFile;
     String queryType = null;
-    int numThreads = 1;
+    int numThreads;
 
     // Encryption variables
     int hashBitSize = 0;

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriverCLI.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriverCLI.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriverCLI.java
index e2c1adf..9012b51 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriverCLI.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDriverCLI.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies;
 
 import org.apache.commons.cli.CommandLine;
@@ -24,22 +24,21 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
-import org.apache.log4j.Logger;
-import org.apache.pirk.querier.wideskies.QuerierConst;
 import org.apache.pirk.schema.data.LoadDataSchemas;
 import org.apache.pirk.schema.query.LoadQuerySchemas;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.SystemConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class for parsing the command line options for the QuerierDriver
  */
 public class QuerierDriverCLI
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(QuerierDriverCLI.class);
 
-  Options cliOptions = null;
-  CommandLine commandLine = null;
+  private Options cliOptions = null;
+  private CommandLine commandLine = null;
 
   // General variables
   public static String ACTION = "action";

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
index 63ae9b9..8eebf82 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies.decrypt;
 
 import java.io.BufferedWriter;
@@ -31,14 +31,14 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.Paillier;
 import org.apache.pirk.querier.wideskies.Querier;
 import org.apache.pirk.query.wideskies.QueryInfo;
 import org.apache.pirk.response.wideskies.Response;
 import org.apache.pirk.schema.response.QueryResponseJSON;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.PIRException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class to perform PIR decryption
@@ -46,20 +46,20 @@ import org.apache.pirk.utils.PIRException;
  */
 public class DecryptResponse
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(DecryptResponse.class);
 
-  Response response = null;
+  private Response response = null;
 
-  Querier querier = null;
+  private Querier querier = null;
 
-  HashMap<String,ArrayList<QueryResponseJSON>> resultMap = null; // selector -> ArrayList of hits
+  private HashMap<String,ArrayList<QueryResponseJSON>> resultMap = null; // selector -> ArrayList of hits
 
   public DecryptResponse(Response responseInput, Querier querierInput)
   {
     response = responseInput;
     querier = querierInput;
 
-    resultMap = new HashMap<String,ArrayList<QueryResponseJSON>>();
+    resultMap = new HashMap<>();
   }
 
   /**
@@ -98,7 +98,7 @@ public class DecryptResponse
     int dataPartitionBitSize = queryInfo.getDataPartitionBitSize();
 
     // Initialize the result map and masks-- removes initialization checks from code below
-    HashMap<String,BigInteger> selectorMaskMap = new HashMap<String,BigInteger>();
+    HashMap<String,BigInteger> selectorMaskMap = new HashMap<>();
     int selectorNum = 0;
     BigInteger twoBI = BigInteger.valueOf(2);
     for (String selector : selectors)
@@ -121,7 +121,7 @@ public class DecryptResponse
     }
     int elementsPerThread = (int) (Math.floor(selectors.size() / numThreads));
 
-    ArrayList<DecryptResponseRunnable> runnables = new ArrayList<DecryptResponseRunnable>();
+    ArrayList<DecryptResponseRunnable> runnables = new ArrayList<>();
     for (int i = 0; i < numThreads; ++i)
     {
       // Grab the range of the thread and create the corresponding partition of selectors
@@ -131,7 +131,7 @@ public class DecryptResponse
       {
         stop = selectors.size() - 1;
       }
-      TreeMap<Integer,String> selectorsPartition = new TreeMap<Integer,String>();
+      TreeMap<Integer,String> selectorsPartition = new TreeMap<>();
       for (int j = start; j <= stop; ++j)
       {
         selectorsPartition.put(j, selectors.get(j));
@@ -165,7 +165,7 @@ public class DecryptResponse
   // extract and reconstruct the data elements
   private ArrayList<BigInteger> decryptElements(TreeMap<Integer,BigInteger> elements, Paillier paillier)
   {
-    ArrayList<BigInteger> decryptedElements = new ArrayList<BigInteger>();
+    ArrayList<BigInteger> decryptedElements = new ArrayList<>();
 
     for (BigInteger encElement : elements.values())
     {

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
index a70a6f6..35482c3 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponseRunnable.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies.decrypt;
 
 import java.math.BigInteger;
@@ -23,13 +23,13 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.TreeMap;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.query.wideskies.QueryInfo;
 import org.apache.pirk.query.wideskies.QueryUtils;
 import org.apache.pirk.schema.query.LoadQuerySchemas;
 import org.apache.pirk.schema.query.QuerySchema;
 import org.apache.pirk.schema.response.QueryResponseJSON;
-import org.apache.pirk.utils.LogUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Runnable class for multithreaded PIR decryption
@@ -39,15 +39,15 @@ import org.apache.pirk.utils.LogUtils;
  */
 public class DecryptResponseRunnable implements Runnable
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(DecryptResponseRunnable.class);
 
-  HashMap<String,ArrayList<QueryResponseJSON>> resultMap = null; // selector -> ArrayList of hits
+  private HashMap<String,ArrayList<QueryResponseJSON>> resultMap = null; // selector -> ArrayList of hits
 
-  ArrayList<BigInteger> rElements = null;
-  TreeMap<Integer,String> selectors = null;
-  HashMap<String,BigInteger> selectorMaskMap = null;
-  QueryInfo queryInfo = null;
-  HashMap<Integer,String> embedSelectorMap = null;
+  private ArrayList<BigInteger> rElements = null;
+  private TreeMap<Integer,String> selectors = null;
+  private HashMap<String,BigInteger> selectorMaskMap = null;
+  private QueryInfo queryInfo = null;
+  private HashMap<Integer,String> embedSelectorMap = null;
 
   public DecryptResponseRunnable(ArrayList<BigInteger> rElementsInput, TreeMap<Integer,String> selectorsInput, HashMap<String,BigInteger> selectorMaskMapInput,
       QueryInfo queryInfoInput, HashMap<Integer,String> embedSelectorMapInput)
@@ -58,7 +58,7 @@ public class DecryptResponseRunnable implements Runnable
     queryInfo = queryInfoInput;
     embedSelectorMap = embedSelectorMapInput;
 
-    resultMap = new HashMap<String,ArrayList<QueryResponseJSON>>();
+    resultMap = new HashMap<>();
   }
 
   public HashMap<String,ArrayList<QueryResponseJSON>> getResultMap()
@@ -96,7 +96,7 @@ public class DecryptResponseRunnable implements Runnable
         String selector = selectors.get(selectorIndex);
         logger.debug("selector = " + selector);
 
-        ArrayList<BigInteger> parts = new ArrayList<BigInteger>();
+        ArrayList<BigInteger> parts = new ArrayList<>();
         int partNum = 0;
         boolean zeroElement = true;
         while (partNum < numPartitionsPerDataElement)

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java
index ca38fc5..87ee9d9 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQuery.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies.encrypt;
 
 import java.io.File;
@@ -27,7 +27,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.Paillier;
 import org.apache.pirk.querier.wideskies.Querier;
 import org.apache.pirk.querier.wideskies.QuerierConst;
@@ -39,8 +38,9 @@ import org.apache.pirk.schema.data.LoadDataSchemas;
 import org.apache.pirk.schema.query.LoadQuerySchemas;
 import org.apache.pirk.schema.query.QuerySchema;
 import org.apache.pirk.utils.KeyedHash;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.PIRException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class to perform PIR encryption
@@ -48,22 +48,22 @@ import org.apache.pirk.utils.PIRException;
  */
 public class EncryptQuery
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(EncryptQuery.class);
 
-  QueryInfo queryInfo = null; // contains basic query information and functionality
+  private QueryInfo queryInfo = null; // contains basic query information and functionality
 
-  Query query = null; // contains the query vectors
+  private Query query = null; // contains the query vectors
 
-  Querier querier = null; // contains the query vectors and encryption object
+  private Querier querier = null; // contains the query vectors and encryption object
 
-  Paillier paillier = null; // Paillier encryption functionality
+  private Paillier paillier = null; // Paillier encryption functionality
 
-  ArrayList<String> selectors = null; // selectors for the query
+  private ArrayList<String> selectors = null; // selectors for the query
 
   // Map to check the embedded selectors in the results for false positives;
   // if the selector is a fixed size < 32 bits, it is included as is
   // if the selector is of variable lengths
-  HashMap<Integer,String> embedSelectorMap = null;
+  private HashMap<Integer,String> embedSelectorMap = null;
 
   public EncryptQuery(QueryInfo queryInfoInput, ArrayList<String> selectorsInput, Paillier paillierInput)
   {
@@ -73,7 +73,7 @@ public class EncryptQuery
 
     paillier = paillierInput;
 
-    embedSelectorMap = new HashMap<Integer,String>();
+    embedSelectorMap = new HashMap<>();
   }
 
   public Paillier getPaillier()
@@ -146,8 +146,8 @@ public class EncryptQuery
     String hashKey = queryInfo.getHashKey();
     int keyCounter = 0;
     int numSelectors = selectors.size();
-    HashSet<Integer> hashes = new HashSet<Integer>(numSelectors);
-    HashMap<Integer,Integer> selectorQueryVecMapping = new HashMap<Integer,Integer>(numSelectors);
+    HashSet<Integer> hashes = new HashSet<>(numSelectors);
+    HashMap<Integer,Integer> selectorQueryVecMapping = new HashMap<>(numSelectors);
 
     for (int index = 0; index < numSelectors; index++)
     {
@@ -202,7 +202,7 @@ public class EncryptQuery
   {
     // Encrypt and form the query vector
     ExecutorService es = Executors.newCachedThreadPool();
-    ArrayList<EncryptQueryRunnable> runnables = new ArrayList<EncryptQueryRunnable>(numThreads);
+    ArrayList<EncryptQueryRunnable> runnables = new ArrayList<>(numThreads);
     int numElements = 1 << queryInfo.getHashBitSize();  // 2^hashBitSize
 
     // Split the work across the requested number of threads
@@ -218,14 +218,14 @@ public class EncryptQuery
       }
 
       // Copy selectorQueryVecMapping (if numThreads > 1) so we don't have to synchronize - only has size = selectors.size()
-      HashMap<Integer,Integer> selectorQueryVecMappingCopy = null;
+      HashMap<Integer,Integer> selectorQueryVecMappingCopy;
       if (numThreads == 1)
       {
         selectorQueryVecMappingCopy = selectorQueryVecMapping;
       }
       else
       {
-        selectorQueryVecMappingCopy = new HashMap<Integer,Integer>(selectorQueryVecMapping);
+        selectorQueryVecMappingCopy = new HashMap<>(selectorQueryVecMapping);
       }
 
       // Create the runnable and execute

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java
index 04fb929..565560b 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,17 +15,17 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies.encrypt;
 
 import java.math.BigInteger;
 import java.util.HashMap;
 import java.util.TreeMap;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.Paillier;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.PIRException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Runnable class for multithreaded PIR encryption
@@ -33,16 +33,16 @@ import org.apache.pirk.utils.PIRException;
  */
 public class EncryptQueryRunnable implements Runnable
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(EncryptQueryRunnable.class);
 
-  int dataPartitionBitSize = 0;
-  int start = 0; // start of computing range for the runnable
-  int stop = 0; // stop, inclusive, of the computing range for the runnable
+  private int dataPartitionBitSize = 0;
+  private int start = 0; // start of computing range for the runnable
+  private int stop = 0; // stop, inclusive, of the computing range for the runnable
 
-  Paillier paillier = null;
-  HashMap<Integer,Integer> selectorQueryVecMapping = null;
+  private Paillier paillier = null;
+  private HashMap<Integer,Integer> selectorQueryVecMapping = null;
 
-  TreeMap<Integer,BigInteger> encryptedValues = null; // holds the ordered encrypted values to pull after thread computation is complete
+  private TreeMap<Integer,BigInteger> encryptedValues = null; // holds the ordered encrypted values to pull after thread computation is complete
 
   public EncryptQueryRunnable(int dataPartitionBitSizeInput, Paillier paillierInput, HashMap<Integer,Integer> selectorQueryVecMappingInput, int startInput,
       int stopInput)

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/ExpTableRunnable.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/ExpTableRunnable.java b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/ExpTableRunnable.java
index a76000d..aaa0a81 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/ExpTableRunnable.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/ExpTableRunnable.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,16 +15,16 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies.encrypt;
 
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.HashMap;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.ModPowAbstraction;
-import org.apache.pirk.utils.LogUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Runnable class for modular exponential table creation
@@ -32,16 +32,16 @@ import org.apache.pirk.utils.LogUtils;
  */
 public class ExpTableRunnable implements Runnable
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(ExpTableRunnable.class);
 
-  int dataPartitionBitSize = 0;
-  BigInteger NSquared = null;
-  ArrayList<BigInteger> queryElements = null;
+  private int dataPartitionBitSize = 0;
+  private BigInteger NSquared = null;
+  private ArrayList<BigInteger> queryElements = null;
 
   // lookup table for exponentiation of query vectors -
   // based on dataPartitionBitSize
   // element -> <power, element^power mod N^2>
-  HashMap<BigInteger,HashMap<Integer,BigInteger>> expTable = null;
+  private HashMap<BigInteger,HashMap<Integer,BigInteger>> expTable = null;
 
   public ExpTableRunnable(int dataPartitionBitSizeInput, BigInteger NSquaredInput, ArrayList<BigInteger> queryElementsInput)
   {
@@ -49,7 +49,7 @@ public class ExpTableRunnable implements Runnable
     NSquared = NSquaredInput;
     queryElements = queryElementsInput;
 
-    expTable = new HashMap<BigInteger,HashMap<Integer,BigInteger>>();
+    expTable = new HashMap<>();
   }
 
   @Override
@@ -60,7 +60,7 @@ public class ExpTableRunnable implements Runnable
     {
       logger.debug("element = " + element.toString(2) + " maxValue = " + maxValue + " dataPartitionBitSize = " + dataPartitionBitSize);
 
-      HashMap<Integer,BigInteger> powMap = new HashMap<Integer,BigInteger>(); // <power, element^power mod N^2>
+      HashMap<Integer,BigInteger> powMap = new HashMap<>(); // <power, element^power mod N^2>
       for (int i = 0; i <= maxValue; ++i)
       {
         BigInteger value = ModPowAbstraction.modPow(element, BigInteger.valueOf(i), NSquared);

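The comments above describe the lookup table as element -> <power, element^power mod N^2>, built per query element. A small self-contained sketch of building one such power map, assuming the powers run from 0 to 2^dataPartitionBitSize - 1 (the diff does not show how maxValue is derived) and calling java.math.BigInteger.modPow directly rather than the project's ModPowAbstraction wrapper; names are illustrative:

    import java.math.BigInteger;
    import java.util.HashMap;

    public class PowMapSketch
    {
      // Builds <power, element^power mod N^2> for power = 0 .. maxValue,
      // where maxValue is assumed to be (2^dataPartitionBitSize - 1)
      public static HashMap<Integer,BigInteger> buildPowMap(BigInteger element, BigInteger nSquared, int dataPartitionBitSize)
      {
        int maxValue = (1 << dataPartitionBitSize) - 1;
        HashMap<Integer,BigInteger> powMap = new HashMap<>();
        for (int power = 0; power <= maxValue; ++power)
        {
          powMap.put(power, element.modPow(BigInteger.valueOf(power), nSquared));
        }
        return powMap;
      }
    }
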
http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/query/wideskies/Query.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java
index 8ef9f1c..ebaafbb 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/Query.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,12 +15,11 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.query.wideskies;
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.ObjectInputStream;
@@ -36,10 +35,10 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.ModPowAbstraction;
 import org.apache.pirk.querier.wideskies.encrypt.ExpTableRunnable;
-import org.apache.pirk.utils.LogUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class to hold the PIR query vectors
@@ -49,22 +48,22 @@ public class Query implements Serializable
 {
   private static final long serialVersionUID = 1L;
 
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(Query.class);
 
-  QueryInfo qInfo = null; // holds all query info
+  private QueryInfo qInfo = null; // holds all query info
 
-  TreeMap<Integer,BigInteger> queryElements = null; // query elements - ordered on insertion
+  private TreeMap<Integer,BigInteger> queryElements = null; // query elements - ordered on insertion
 
   // lookup table for exponentiation of query vectors - based on dataPartitionBitSize
   // element -> <power, element^power mod N^2>
-  HashMap<BigInteger,HashMap<Integer,BigInteger>> expTable = null;
+  private HashMap<BigInteger,HashMap<Integer,BigInteger>> expTable = null;
 
   // File based lookup table for modular exponentiation
  // element hash -> filename containing its <power, element^power mod N^2> modular exponentiations
-  HashMap<Integer,String> expFileBasedLookup = null;
+  private HashMap<Integer,String> expFileBasedLookup = null;
 
-  BigInteger N = null; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements
-  BigInteger NSquared = null;
+  private BigInteger N = null; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements
+  private BigInteger NSquared = null;
 
   public Query(QueryInfo queryInfoIn, BigInteger NInput)
   {
@@ -72,10 +71,10 @@ public class Query implements Serializable
     N = NInput;
     NSquared = N.pow(2);
 
-    queryElements = new TreeMap<Integer,BigInteger>();
-    expTable = new HashMap<BigInteger,HashMap<Integer,BigInteger>>();
+    queryElements = new TreeMap<>();
+    expTable = new HashMap<>();
 
-    expFileBasedLookup = new HashMap<Integer,String>();
+    expFileBasedLookup = new HashMap<>();
   }
 
   public QueryInfo getQueryInfo()
@@ -164,7 +163,7 @@ public class Query implements Serializable
       {
         logger.debug("element = " + element.toString(2) + " maxValue = " + maxValue + " dataPartitionBitSize = " + dataPartitionBitSize);
 
-        HashMap<Integer,BigInteger> powMap = new HashMap<Integer,BigInteger>(); // <power, element^power mod N^2>
+        HashMap<Integer,BigInteger> powMap = new HashMap<>(); // <power, element^power mod N^2>
         for (int i = 0; i <= maxValue; ++i)
         {
           BigInteger value = ModPowAbstraction.modPow(element, BigInteger.valueOf(i), NSquared);
@@ -180,7 +179,7 @@ public class Query implements Serializable
       ExecutorService es = Executors.newCachedThreadPool();
       int elementsPerThread = (int) (Math.floor(queryElements.size() / numThreads));
 
-      ArrayList<ExpTableRunnable> runnables = new ArrayList<ExpTableRunnable>();
+      ArrayList<ExpTableRunnable> runnables = new ArrayList<>();
       for (int i = 0; i < numThreads; ++i)
       {
         // Grab the range of the thread and create the corresponding partition of selectors
@@ -190,7 +189,7 @@ public class Query implements Serializable
         {
           stop = queryElements.size() - 1;
         }
-        ArrayList<BigInteger> queryElementsPartition = new ArrayList<BigInteger>();
+        ArrayList<BigInteger> queryElementsPartition = new ArrayList<>();
         for (int j = start; j <= stop; ++j)
         {
           queryElementsPartition.add(queryElements.get(j));
@@ -262,9 +261,8 @@ public class Query implements Serializable
    */
   public static Query readFromFile(String filename) throws IOException
   {
-    Query query = readFromFile(new File(filename));
 
-    return query;
+    return readFromFile(new File(filename));
   }
 
   /**
@@ -275,19 +273,13 @@ public class Query implements Serializable
     Query query = null;
 
     FileInputStream fIn = null;
-    ObjectInputStream oIn = null;
+    ObjectInputStream oIn;
     try
     {
       fIn = new FileInputStream(file);
       oIn = new ObjectInputStream(fIn);
       query = (Query) oIn.readObject();
-    } catch (FileNotFoundException e)
-    {
-      e.printStackTrace();
-    } catch (IOException e)
-    {
-      e.printStackTrace();
-    } catch (ClassNotFoundException e)
+    } catch (IOException | ClassNotFoundException e)
     {
       e.printStackTrace();
     } finally

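The last hunk above collapses the separate catch blocks in readFromFile into a single multi-catch; the stream is still closed in the surrounding finally block. For comparison, a sketch of the same read written with try-with-resources, which closes the stream automatically (an alternative style, not what this commit does; the wrapper class and method name are illustrative):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.ObjectInputStream;

    public class ReadSketch
    {
      public static Object readObjectFromFile(File file) throws IOException
      {
        // try-with-resources closes the stream even if readObject() throws
        try (ObjectInputStream oIn = new ObjectInputStream(new FileInputStream(file)))
        {
          return oIn.readObject();
        } catch (ClassNotFoundException e)
        {
          throw new IOException("Serialized class not found while reading " + file, e);
        }
      }
    }
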
http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
index f87f3d1..3ec73ea 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,14 +15,14 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.query.wideskies;
 
 import java.io.Serializable;
 
-import org.apache.log4j.Logger;
 import org.apache.pirk.schema.query.LoadQuerySchemas;
-import org.apache.pirk.utils.LogUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class to hold all of the basic information regarding a query
@@ -35,30 +35,30 @@ public class QueryInfo implements Serializable
 {
   private static final long serialVersionUID = 1L;
 
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class);
 
-  double queryNum = 0.0; // the identifier num of the query
-  int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize}
+  private double queryNum = 0.0; // the identifier num of the query
+  private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillierBitSize/dataPartitionBitSize}
 
-  String queryType = null; // QueryType string const
+  private String queryType = null; // QueryType string const
 
-  String queryName = null; // Name of query
+  private String queryName = null; // Name of query
 
-  int paillierBitSize = 0; // Paillier modulus size
+  private int paillierBitSize = 0; // Paillier modulus size
 
-  int hashBitSize = 0; // Bit size of the keyed hash function
-  String hashKey = null; // Key for the keyed hash function
+  private int hashBitSize = 0; // Bit size of the keyed hash function
+  private String hashKey = null; // Key for the keyed hash function
 
-  int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type
-  int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now
-  int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element
+  private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type
+  private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now
+  private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element
 
-  boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption, it is very expensive to compute
+  private boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption; it is very expensive to compute
 
-  boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS
+  private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS
   // if it doesn't yet exist, it will be created within the cluster and stored in HDFS
 
-  boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low
+  private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low
 
   // false positive rate for variable length selectors and a zero false positive rate
   // for selectors of fixed size < 32 bits
@@ -176,9 +176,7 @@ public class QueryInfo implements Serializable
 
   public QueryInfo copy()
   {
-    QueryInfo queryInfo = new QueryInfo(this.queryNum, this.numSelectors, this.hashBitSize, this.hashKey, this.dataPartitionBitSize, this.queryType,
+    return new QueryInfo(this.queryNum, this.numSelectors, this.hashBitSize, this.hashKey, this.dataPartitionBitSize, this.queryType,
         this.queryName, this.paillierBitSize, this.useExpLookupTable, this.embedSelector, this.useHDFSExpLookupTable);
-
-    return queryInfo;
   }
 }

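The field comments above pin down how the sizes relate: numSelectors is floor(paillierBitSize / dataPartitionBitSize), and each data element of numBitsPerDataElement bits is split into partitions of dataPartitionBitSize bits. A tiny sketch of that arithmetic with made-up example values; the relation for numPartitionsPerDataElement is inferred from the comments, not shown in this diff:

    public class QuerySizingSketch
    {
      public static void main(String[] args)
      {
        int paillierBitSize = 3072;       // Paillier modulus size (example value)
        int dataPartitionBitSize = 8;     // bits per partition of a data element (must be < 32)
        int numBitsPerDataElement = 1024; // total bits per returned data value (example value)

        // per the field comment: numSelectors = floor(paillierBitSize / dataPartitionBitSize)
        int numSelectors = paillierBitSize / dataPartitionBitSize;                      // 384

        // assumed: one partition per dataPartitionBitSize-sized chunk of the element
        int numPartitionsPerDataElement = numBitsPerDataElement / dataPartitionBitSize; // 128

        System.out.println("numSelectors = " + numSelectors);
        System.out.println("numPartitionsPerDataElement = " + numPartitionsPerDataElement);
      }
    }
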
http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java b/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java
index a0b09d2..0bf2fb7 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/QueryUtils.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,19 +15,19 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.query.wideskies;
 
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.TreeSet;
 
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
 import org.apache.pirk.schema.data.DataSchema;
 import org.apache.pirk.schema.data.LoadDataSchemas;
 import org.apache.pirk.schema.data.partitioner.DataPartitioner;
@@ -36,18 +36,19 @@ import org.apache.pirk.schema.query.LoadQuerySchemas;
 import org.apache.pirk.schema.query.QuerySchema;
 import org.apache.pirk.schema.response.QueryResponseJSON;
 import org.apache.pirk.utils.KeyedHash;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.StringUtils;
 import org.apache.pirk.utils.SystemConfiguration;
 import org.elasticsearch.hadoop.mr.WritableArrayWritable;
 import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class for helper methods to perform the encrypted query
  */
 public class QueryUtils
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(QueryUtils.class);
 
   /**
    * Method to convert the given BigInteger raw data element partitions to a QueryResponseJSON object based upon the given queryType
@@ -108,7 +109,7 @@ public class QueryUtils
    */
   public static ArrayList<BigInteger> partitionDataElement(String queryType, JSONObject jsonData, boolean embedSelector) throws Exception
   {
-    ArrayList<BigInteger> parts = new ArrayList<BigInteger>();
+    ArrayList<BigInteger> parts = new ArrayList<>();
 
     logger.debug("queryType = " + queryType);
 
@@ -142,7 +143,7 @@ public class QueryUtils
         List<String> elementArray;
         if (dataElement == null)
         {
-          elementArray = Arrays.asList("0");
+          elementArray = Collections.singletonList("0");
         }
         else
         {
@@ -174,7 +175,7 @@ public class QueryUtils
   public static ArrayList<BigInteger> partitionDataElement(MapWritable dataMap, QuerySchema qSchema, DataSchema dSchema, boolean embedSelector)
       throws Exception
   {
-    ArrayList<BigInteger> parts = new ArrayList<BigInteger>();
+    ArrayList<BigInteger> parts = new ArrayList<>();
 
     logger.debug("queryType = " + qSchema.getSchemaName());
 
@@ -205,7 +206,7 @@ public class QueryUtils
         List<String> elementArray = null;
         if (dataElement == null)
         {
-          elementArray = Arrays.asList("");
+          elementArray = Collections.singletonList("");
         }
         else if (dataElement instanceof WritableArrayWritable)
         {
@@ -241,7 +242,7 @@ public class QueryUtils
    */
   public static ArrayList<BigInteger> embeddedSelectorToPartitions(Object selector, String type, Object partitioner) throws Exception
   {
-    ArrayList<BigInteger> parts = null;
+    ArrayList<BigInteger> parts;
 
     int partitionBits = ((DataPartitioner) partitioner).getBits(type);
     if (partitionBits > 32) // hash and add 32-bit hash value to partitions
@@ -264,7 +265,7 @@ public class QueryUtils
    */
   public static String getEmbeddedSelector(Object selector, String type, Object partitioner) throws Exception
   {
-    String embeddedSelector = null;
+    String embeddedSelector;
 
     int partitionBits = ((DataPartitioner) partitioner).getBits(type);
     if (partitionBits > 32) // hash and add 32-bit hash value to partitions
@@ -285,7 +286,7 @@ public class QueryUtils
    */
   public static String getEmbeddedSelectorFromPartitions(ArrayList<BigInteger> parts, int partsIndex, String type, Object partitioner) throws Exception
   {
-    String embeddedSelector = null;
+    String embeddedSelector;
 
     int partitionBits = ((DataPartitioner) partitioner).getBits(type);
     if (partitionBits > 32) // the embedded selector will be the 32-bit hash value of the hit selector
@@ -308,7 +309,7 @@ public class QueryUtils
    */
   public static String getSelectorByQueryType(MapWritable dataMap, QuerySchema qSchema, DataSchema dSchema)
   {
-    String selector = null;
+    String selector;
 
     String fieldName = qSchema.getSelectorName();
     if (dSchema.hasListRep(fieldName))
@@ -339,7 +340,7 @@ public class QueryUtils
    */
   public static String getSelectorByQueryTypeJSON(String queryType, JSONObject dataMap)
   {
-    String selector = null;
+    String selector;
 
     QuerySchema qSchema = LoadQuerySchemas.getSchema(queryType);
     DataSchema dSchema = LoadDataSchemas.getSchema(qSchema.getDataSchemaName());

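Beyond the logger swap, the substantive change in this file is replacing Arrays.asList("0") and Arrays.asList("") with Collections.singletonList(...). A short sketch of the difference for the single-element case; both are fixed-size, but singletonList returns an immutable one-element list instead of a list view over a one-element array:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SingletonListSketch
    {
      public static void main(String[] args)
      {
        List<String> viaArrays = Arrays.asList("0");                 // fixed-size view backed by a one-element array
        List<String> viaSingleton = Collections.singletonList("0");  // immutable single-element list

        System.out.println(viaArrays.equals(viaSingleton)); // true: same contents, different implementations

        viaArrays.set(0, "1");        // allowed: Arrays.asList supports set, but not add/remove
        // viaSingleton.set(0, "1");  // would throw UnsupportedOperationException
      }
    }
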
http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java b/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java
index 5285a88..e412625 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.responder.wideskies;
 
 import org.apache.commons.cli.CommandLine;
@@ -24,22 +24,22 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
-import org.apache.log4j.Logger;
 import org.apache.pirk.inputformat.hadoop.InputFormatConst;
 import org.apache.pirk.schema.data.LoadDataSchemas;
 import org.apache.pirk.schema.query.LoadQuerySchemas;
-import org.apache.pirk.utils.LogUtils;
 import org.apache.pirk.utils.SystemConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class for parsing the command line options for the ResponderDriver
  */
 public class ResponderCLI
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(ResponderCLI.class);
 
-  Options cliOptions = null;
-  CommandLine commandLine = null;
+  private Options cliOptions = null;
+  private CommandLine commandLine = null;
 
   // Required args
   public static String PLATFORM = "platform";
@@ -54,19 +54,19 @@ public class ResponderCLI
   // Optional args
   public static String BASEINPUTFORMAT = "baseInputFormat";
   public static String STOPLISTFILE = "stopListFile";
-  public static String NUMREDUCETASKS = "numReduceTasks";
+  private static String NUMREDUCETASKS = "numReduceTasks";
   public static String USELOCALCACHE = "useLocalCache";
   public static String LIMITHITSPERSELECTOR = "limitHitsPerSelector";
   public static String MAXHITSPERSELECTOR = "maxHitsPerSelector";
-  public static String MAPMEMORY = "mapreduceMapMemoryMb";
-  public static String REDUCEMEMORY = "mapreduceReduceMemoryMb";
-  public static String MAPJAVAOPTS = "mapreduceMapJavaOpts";
-  public static String REDUCEJAVAOPTS = "mapreduceReduceJavaOpts";
+  private static String MAPMEMORY = "mapreduceMapMemoryMb";
+  private static String REDUCEMEMORY = "mapreduceReduceMemoryMb";
+  private static String MAPJAVAOPTS = "mapreduceMapJavaOpts";
+  private static String REDUCEJAVAOPTS = "mapreduceReduceJavaOpts";
   public static String QUERYSCHEMAS = "querySchemas";
   public static String DATASCHEMAS = "dataSchemas";
   public static String NUMEXPLOOKUPPARTS = "numExpLookupPartitions";
-  public static String USEHDFSLOOKUPTABLE = "useHDFSLookupTable";
-  public static String NUMDATAPARTITIONS = "numDataPartitions";
+  private static String USEHDFSLOOKUPTABLE = "useHDFSLookupTable";
+  private static String NUMDATAPARTITIONS = "numDataPartitions";
   public static String NUMCOLMULTPARTITIONS = "numColMultPartitions";
   public static String USEMODEXPJOIN = "useModExpJoin";
   public static String COLMULTREDUCEBYKEY = "colMultReduceByKey";

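This file's change narrows several option-name constants to private and swaps in SLF4J; the Commons CLI types it imports (Options, Option, GnuParser, HelpFormatter) are unchanged. For orientation, a minimal sketch of how those classes fit together, using a single hypothetical option rather than the responder's real option set:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.HelpFormatter;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class CliSketch
    {
      public static void main(String[] args)
      {
        Options cliOptions = new Options();

        // hypothetical required option; the real ResponderCLI defines many more
        Option platform = new Option("p", "platform", true, "Platform to run the responder on");
        platform.setRequired(true);
        cliOptions.addOption(platform);

        try
        {
          CommandLine commandLine = new GnuParser().parse(cliOptions, args);
          System.out.println("platform = " + commandLine.getOptionValue("platform"));
        } catch (ParseException e)
        {
          new HelpFormatter().printHelp("responder", cliOptions);
        }
      }
    }
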
http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/responder/wideskies/ResponderDriver.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/ResponderDriver.java b/src/main/java/org/apache/pirk/responder/wideskies/ResponderDriver.java
index d119adb..4cd6b5f 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/ResponderDriver.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/ResponderDriver.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.responder.wideskies;
 
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/ef8d1c1a/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java b/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
index e4dcbed..0a0d7ef 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/common/ComputeEncryptedRow.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.responder.wideskies.common;
 
 import java.io.BufferedReader;
@@ -26,33 +26,32 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.concurrent.ExecutionException;
 
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
 import org.apache.pirk.encryption.ModPowAbstraction;
 import org.apache.pirk.inputformat.hadoop.BytesArrayWritable;
 import org.apache.pirk.query.wideskies.Query;
-import org.apache.pirk.utils.LogUtils;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import scala.Tuple2;
 import scala.Tuple3;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-
 /**
  * Class to compute the encrypted row elements for a query from extracted data partitions
  * 
  */
 public class ComputeEncryptedRow
 {
-  private static Logger logger = LogUtils.getLoggerForThisClass();
+  private static final Logger logger = LoggerFactory.getLogger(ComputeEncryptedRow.class);
 
   // Input: base, exponent, NSquared
   // <<base,exponent,NSquared>, base^exponent mod N^2>
-  static LoadingCache<Tuple3<BigInteger,BigInteger,BigInteger>,BigInteger> expCache = CacheBuilder.newBuilder().maximumSize(10000)
-      .build(new CacheLoader<Tuple3<BigInteger,BigInteger,BigInteger>,BigInteger>()
+  private static LoadingCache<Tuple3<BigInteger,BigInteger,BigInteger>,BigInteger> expCache =
+      CacheBuilder.newBuilder().maximumSize(10000)
+          .build(new CacheLoader<Tuple3<BigInteger,BigInteger,BigInteger>,BigInteger>()
       {
         @Override
         public BigInteger load(Tuple3<BigInteger,BigInteger,BigInteger> info) throws Exception
@@ -83,13 +82,13 @@ public class ComputeEncryptedRow
       BigInteger value = new BigInteger(expMod[1]);
 
       // Cache: <<base,exponent,NSquared>, base^exponent mod N^2>
-      Tuple3<BigInteger,BigInteger,BigInteger> key = new Tuple3<BigInteger,BigInteger,BigInteger>(base, exponent, query.getNSquared());
+      Tuple3<BigInteger,BigInteger,BigInteger> key = new Tuple3<>(base, exponent, query.getNSquared());
       expCache.put(key, value);
     }
   }
 
   /**
-   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@code<BytesArrayWritable>}
+   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@link BytesArrayWritable}
    * <p>
    * For each row (as indicated by key = hash(selector)), iterates over the dataPartitions and calculates the column values.
    * <p>
@@ -101,7 +100,7 @@ public class ComputeEncryptedRow
   public static ArrayList<Tuple2<Long,BigInteger>> computeEncRow(Iterable<BytesArrayWritable> dataPartitionsIter, Query query, int rowIndex,
       boolean limitHitsPerSelector, int maxHitsPerSelector, boolean useCache) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<Tuple2<Long,BigInteger>>();
+    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     // Pull the corresponding encrypted row query
     BigInteger rowQuery = query.getQueryElement(rowIndex);
@@ -130,7 +129,7 @@ public class ComputeEncryptedRow
         {
           if (useCache)
           {
-            exp = expCache.get(new Tuple3<BigInteger,BigInteger,BigInteger>(rowQuery, part, query.getNSquared()));
+            exp = expCache.get(new Tuple3<>(rowQuery, part, query.getNSquared()));
           }
           else
           {
@@ -143,7 +142,7 @@ public class ComputeEncryptedRow
         logger.debug("rowIndex = " + rowIndex + " colCounter = " + colCounter + " part = " + part.toString() + " part binary = " + part.toString(2) + " exp = "
             + exp + " i = " + i + " partition = " + dataPartitions.getBigInteger(i) + " = " + dataPartitions.getBigInteger(i).toString(2));
 
-        returnPairs.add(new Tuple2<Long,BigInteger>(colCounter, exp));
+        returnPairs.add(new Tuple2<>(colCounter, exp));
 
         ++colCounter;
       }
@@ -153,7 +152,7 @@ public class ComputeEncryptedRow
   }
 
   /**
-   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@ArrayList<BigInteger>
+   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@link ArrayList<BigInteger>
    * 
    * 
    * }
@@ -168,7 +167,7 @@ public class ComputeEncryptedRow
   public static ArrayList<Tuple2<Long,BigInteger>> computeEncRowBI(Iterable<ArrayList<BigInteger>> dataPartitionsIter, Query query, int rowIndex,
       boolean limitHitsPerSelector, int maxHitsPerSelector, boolean useCache) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<Tuple2<Long,BigInteger>>();
+    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     // Pull the corresponding encrypted row query
     BigInteger rowQuery = query.getQueryElement(rowIndex);
@@ -200,7 +199,7 @@ public class ComputeEncryptedRow
         {
           if (useCache)
           {
-            exp = expCache.get(new Tuple3<BigInteger,BigInteger,BigInteger>(rowQuery, part, query.getNSquared()));
+            exp = expCache.get(new Tuple3<>(rowQuery, part, query.getNSquared()));
           }
           else
           {
@@ -213,7 +212,7 @@ public class ComputeEncryptedRow
         logger.debug("rowIndex = " + rowIndex + " colCounter = " + colCounter + " part = " + part.toString() + " part binary = " + part.toString(2) + " exp = "
             + exp + " i = " + i);
 
-        returnPairs.add(new Tuple2<Long,BigInteger>(colCounter, exp));
+        returnPairs.add(new Tuple2<>(colCounter, exp));
 
         ++colCounter;
       }
@@ -229,7 +228,7 @@ public class ComputeEncryptedRow
   }
 
   /**
-   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@<BytesArrayWritable>
+   * Method to compute the encrypted row elements for a query from extracted data partitions in the form of Iterable{@link <BytesArrayWritable>
    * 
    * 
    * } given an input modular exponentiation table for the row
@@ -242,7 +241,7 @@ public class ComputeEncryptedRow
   public static ArrayList<Tuple2<Long,BigInteger>> computeEncRowCacheInput(Iterable<ArrayList<BigInteger>> dataPartitionsIter,
       HashMap<Integer,BigInteger> cache, int rowIndex, boolean limitHitsPerSelector, int maxHitsPerSelector) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<Tuple2<Long,BigInteger>>();
+    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     long colCounter = 0;
     int elementCounter = 0;
@@ -267,7 +266,7 @@ public class ComputeEncryptedRow
 
         logger.debug("rowIndex = " + rowIndex + " colCounter = " + colCounter + " part = " + part.toString() + " exp = " + exp + " i = " + i);
 
-        returnPairs.add(new Tuple2<Long,BigInteger>(colCounter, exp));
+        returnPairs.add(new Tuple2<>(colCounter, exp));
 
         ++colCounter;
       }
@@ -290,7 +289,7 @@ public class ComputeEncryptedRow
    */
   public static ArrayList<Tuple2<Long,BigInteger>> computeEncRow(BytesArrayWritable dataPartitions, Query query, int rowIndex, int colIndex) throws IOException
   {
-    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<Tuple2<Long,BigInteger>>();
+    ArrayList<Tuple2<Long,BigInteger>> returnPairs = new ArrayList<>();
 
     // Pull the corresponding encrypted row query
     BigInteger rowQuery = query.getQueryElement(rowIndex);
@@ -308,7 +307,7 @@ public class ComputeEncryptedRow
       BigInteger exp = null;
       try
       {
-        exp = expCache.get(new Tuple3<BigInteger,BigInteger,BigInteger>(rowQuery, part, query.getNSquared()));
+        exp = expCache.get(new Tuple3<>(rowQuery, part, query.getNSquared()));
       } catch (ExecutionException e)
       {
         e.printStackTrace();
@@ -317,7 +316,7 @@ public class ComputeEncryptedRow
       logger.debug("rowIndex = " + rowIndex + " colCounter = " + colCounter + " part = " + part.toString() + " part binary = " + part.toString(2) + " exp = "
           + exp + " i = " + i + " partition = " + dataPartitions.getBigInteger(i) + " = " + dataPartitions.getBigInteger(i).toString(2));
 
-      returnPairs.add(new Tuple2<Long,BigInteger>(colCounter, exp));
+      returnPairs.add(new Tuple2<>(colCounter, exp));
 
       ++colCounter;
     }