Posted to commits@drill.apache.org by js...@apache.org on 2015/05/28 02:41:27 UTC

[2/2] drill git commit: DRILL-2923: Ensure all unit tests pass without assertions enabled

DRILL-2923: Ensure all unit tests pass without assertions enabled

Modified a number of tests not to use assert, but to instead use one of junit's assertTrue(), assertFalse(), or some other form. Modified test support code that used asserts to throw IllegalStateExceptions instead.
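
For illustration, a minimal sketch of the two rewrite patterns this commit applies (the class and method names below are hypothetical, not taken from the diff). In test methods, Java assert statements become JUnit assertions so the checks run even when the JVM is launched without -ea; in test support code, a failed check throws IllegalStateException instead.

    import static org.junit.Assert.assertFalse;

    import org.junit.Test;

    public class AssertionStyleExampleTest {
      @Test
      public void filterAbsentFromPlan() {
        final String plan = "Scan(groupscan=[...])";
        // Before: assert plan.contains("Filter") == false;
        // After: evaluated unconditionally, with or without -ea.
        assertFalse(plan.contains("Filter"));
      }

      // Test support code: fail loudly without relying on JVM assertions.
      static void verifyNoErrors(final String errors) {
        // Before: assert errors.isEmpty() : errors;
        if (!errors.isEmpty()) {
          throw new IllegalStateException(errors);
        }
      }
    }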


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/6f54223e
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/6f54223e
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/6f54223e

Branch: refs/heads/master
Commit: 6f54223e48f091cf5559b4f3035b4930a59992bf
Parents: 8e0f703
Author: Chris Westin <cw...@yahoo.com>
Authored: Wed May 20 12:00:40 2015 -0700
Committer: Jason Altekruse <al...@gmail.com>
Committed: Wed May 27 15:30:49 2015 -0700

----------------------------------------------------------------------
 .../common/expression/PathSegmentTests.java     |   9 +-
 .../drill/exec/TestHivePartitionPruning.java    |  26 +--
 .../java/org/apache/drill/PlanTestBase.java     |  94 ++++----
 .../compile/bytecode/ReplaceMethodInvoke.java   |   7 +-
 .../fn/interp/ExpressionInterpreterTest.java    | 132 ++++++------
 .../impl/mergereceiver/TestMergingReceiver.java |  77 +++----
 .../exec/physical/impl/sort/TestSimpleSort.java |  76 +++----
 .../ischema/TestInfoSchemaFilterPushDown.java   |  49 +++--
 .../drill/exec/store/parquet/FieldInfo.java     |  26 ++-
 .../store/parquet/ParquetRecordReaderTest.java  | 216 ++++++++++---------
 .../store/parquet/ParquetResultListener.java    |  66 +++---
 .../fn/TestJsonReaderWithSparseFiles.java       |  53 ++---
 .../vector/complex/writer/TestRepeated.java     |  60 +++---
 .../jdbc/DatabaseMetaDataGetColumnsTest.java    |  43 ++--
 .../jdbc/ResultSetGetMethodConversionsTest.java |   7 +-
 .../jdbc/proxy/TracingProxyDriverTest.java      |  20 +-
 ...etColumnsDataTypeNotTypeCodeIntBugsTest.java |  10 +-
 ...rill2461IntervalsBreakInfoSchemaBugTest.java |  32 +--
 ...2463GetNullsFailedWithAssertionsBugTest.java |  40 ++--
 .../apache/drill/jdbc/test/TestJdbcQuery.java   |  43 ++--
 20 files changed, 522 insertions(+), 564 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/common/src/test/java/org/apache/drill/common/expression/PathSegmentTests.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/drill/common/expression/PathSegmentTests.java b/common/src/test/java/org/apache/drill/common/expression/PathSegmentTests.java
index 07b2385..97cd578 100644
--- a/common/src/test/java/org/apache/drill/common/expression/PathSegmentTests.java
+++ b/common/src/test/java/org/apache/drill/common/expression/PathSegmentTests.java
@@ -17,14 +17,15 @@
  */
 package org.apache.drill.common.expression;
 
+import static org.junit.Assert.assertEquals;
+
 import org.apache.drill.test.DrillTest;
 import org.junit.Test;
 
 public class PathSegmentTests extends DrillTest {
-
   protected PathSegment makeArraySegment(final int len, final PathSegment tail) {
     PathSegment node = tail;
-    for (int i=0; i<len; i++) {
+    for (int i = 0; i < len; i++) {
       node = new PathSegment.ArraySegment(node);
     }
     return node;
@@ -35,11 +36,11 @@ public class PathSegmentTests extends DrillTest {
     final int levels = 10;
     final PathSegment segment = new PathSegment.NameSegment("test", makeArraySegment(levels, null));
     final PathSegment clone = segment.clone();
-    assert segment.equals(clone) : "result of clone & original segments must be identical";
+    assertEquals("result of clone & original segments must be identical", segment, clone);
 
     final PathSegment tail = new PathSegment.NameSegment("tail");
     final PathSegment newSegment = new PathSegment.NameSegment("test", makeArraySegment(levels, tail));
     final PathSegment newClone = segment.cloneWithNewChild(tail);
-    assert newSegment.equals(newClone) : "result of cloneWithChild & original segment must be identical";
+    assertEquals("result of cloneWithChild & original segment must be identical", newSegment, newClone);
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHivePartitionPruning.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHivePartitionPruning.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHivePartitionPruning.java
index 7353e05..c846328 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHivePartitionPruning.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHivePartitionPruning.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec;
 
+import static org.junit.Assert.assertFalse;
+
 import org.apache.drill.exec.hive.HiveTestBase;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -25,11 +27,11 @@ public class TestHivePartitionPruning extends HiveTestBase {
   //Currently we do not have a good way to test plans so using a crude string comparison
   @Test
   public void testSimplePartitionFilter() throws Exception {
-    String query = "explain plan for select * from hive.`default`.partition_pruning_test where c = 1";
-    String plan = getPlanInString(query, OPTIQ_FORMAT);
+    final String query = "explain plan for select * from hive.`default`.partition_pruning_test where c = 1";
+    final String plan = getPlanInString(query, OPTIQ_FORMAT);
 
     // Check and make sure that Filter is not present in the plan
-    assert plan.contains("Filter") == false;
+    assertFalse(plan.contains("Filter"));
   }
 
   /* Partition pruning is not supported for disjuncts that do not meet pruning criteria.
@@ -37,28 +39,28 @@ public class TestHivePartitionPruning extends HiveTestBase {
    */
   @Ignore
   public void testDisjunctsPartitionFilter() throws Exception {
-    String query = "explain plan for select * from hive.`default`.partition_pruning_test where (c = 1) or (d = 1)";
-    String plan = getPlanInString(query, OPTIQ_FORMAT);
+    final String query = "explain plan for select * from hive.`default`.partition_pruning_test where (c = 1) or (d = 1)";
+    final String plan = getPlanInString(query, OPTIQ_FORMAT);
 
     // Check and make sure that Filter is not present in the plan
-    assert plan.contains("Filter") == false;
+    assertFalse(plan.contains("Filter"));
   }
 
   @Test
   public void testConjunctsPartitionFilter() throws Exception {
-    String query = "explain plan for select * from hive.`default`.partition_pruning_test where c = 1 and d = 1";
-    String plan = getPlanInString(query, OPTIQ_FORMAT);
+    final String query = "explain plan for select * from hive.`default`.partition_pruning_test where c = 1 and d = 1";
+    final String plan = getPlanInString(query, OPTIQ_FORMAT);
 
     // Check and make sure that Filter is not present in the plan
-    assert plan.contains("Filter") == false;
+    assertFalse(plan.contains("Filter"));
   }
 
   @Ignore("DRILL-1571")
   public void testComplexFilter() throws Exception {
-    String query = "explain plan for select * from hive.`default`.partition_pruning_test where (c = 1 and d = 1) or (c = 2 and d = 3)";
-    String plan = getPlanInString(query, OPTIQ_FORMAT);
+    final String query = "explain plan for select * from hive.`default`.partition_pruning_test where (c = 1 and d = 1) or (c = 2 and d = 3)";
+    final String plan = getPlanInString(query, OPTIQ_FORMAT);
 
     // Check and make sure that Filter is not present in the plan
-    assert plan.contains("Filter") == false;
+    assertFalse(plan.contains("Filter"));
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
index f909681..f9e0b00 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
@@ -18,6 +18,7 @@
 
 package org.apache.drill;
 
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.util.List;
@@ -54,10 +55,11 @@ public class PlanTestBase extends BaseTestQuery {
       throws Exception {
     sql = "EXPLAIN PLAN for " + QueryTestUtil.normalizeQuery(sql);
 
-    String planStr = getPlanInString(sql, JSON_FORMAT);
+    final String planStr = getPlanInString(sql, JSON_FORMAT);
 
-    for (String colNames : expectedSubstrs) {
-      assertTrue(String.format("Unable to find expected string %s in plan: %s!", colNames, planStr), planStr.contains(colNames));
+    for (final String colNames : expectedSubstrs) {
+      assertTrue(String.format("Unable to find expected string %s in plan: %s!", colNames, planStr),
+          planStr.contains(colNames));
     }
   }
 
@@ -77,25 +79,23 @@ public class PlanTestBase extends BaseTestQuery {
    */
   public static void testPlanMatchingPatterns(String query, String[] expectedPatterns, String[] excludedPatterns)
       throws Exception {
-    String plan = getPlanInString("EXPLAIN PLAN for " + QueryTestUtil.normalizeQuery(query), OPTIQ_FORMAT);
+    final String plan = getPlanInString("EXPLAIN PLAN for " + QueryTestUtil.normalizeQuery(query), OPTIQ_FORMAT);
 
-    Pattern p;
-    Matcher m;
     // Check and make sure all expected patterns are in the plan
     if (expectedPatterns != null) {
-      for (String s : expectedPatterns) {
-        p = Pattern.compile(s);
-        m = p.matcher(plan);
-        assert m.find() : EXPECTED_NOT_FOUND + s;
+      for (final String s : expectedPatterns) {
+        final Pattern p = Pattern.compile(s);
+        final Matcher m = p.matcher(plan);
+        assertTrue(EXPECTED_NOT_FOUND + s, m.find());
       }
     }
 
     // Check and make sure all excluded patterns are not in the plan
     if (excludedPatterns != null) {
-      for (String s : excludedPatterns) {
-        p = Pattern.compile(s);
-        m = p.matcher(plan);
-        assert ! m.find() : UNEXPECTED_FOUND + s;
+      for (final String s : excludedPatterns) {
+        final Pattern p = Pattern.compile(s);
+        final Matcher m = p.matcher(plan);
+        assertFalse(UNEXPECTED_FOUND + s, m.find());
       }
     }
   }
@@ -124,20 +124,21 @@ public class PlanTestBase extends BaseTestQuery {
 
     // Check and make sure all expected patterns are in the plan
     if (expectedPatterns != null) {
-      for (String s : expectedPatterns) {
-        assert plan.contains(s) : EXPECTED_NOT_FOUND + s;
+      for (final String s : expectedPatterns) {
+        assertTrue(EXPECTED_NOT_FOUND + s, plan.contains(s));
       }
     }
 
     // Check and make sure all excluded patterns are not in the plan
     if (excludedPatterns != null) {
-      for (String s : excludedPatterns) {
-        assert ! plan.contains(s) : UNEXPECTED_FOUND + s;
+      for (final String s : excludedPatterns) {
+        assertFalse(UNEXPECTED_FOUND + s, plan.contains(s));
       }
     }
   }
 
-  public static void testPlanOneExpectedPatternOneExcluded(String query, String expectedPattern, String excludedPattern) throws Exception {
+  public static void testPlanOneExpectedPatternOneExcluded(
+      String query, String expectedPattern, String excludedPattern) throws Exception {
     testPlanMatchingPatterns(query, new String[]{expectedPattern}, new String[]{excludedPattern});
   }
 
@@ -156,12 +157,12 @@ public class PlanTestBase extends BaseTestQuery {
    * string.
    */
   public static void testRelLogicalJoinOrder(String sql, String... expectedSubstrs) throws Exception {
-    String planStr = getDrillRelPlanInString(sql, SqlExplainLevel.EXPPLAN_ATTRIBUTES, Depth.LOGICAL);
-
-    String prefixJoinOrder = getLogicalPrefixJoinOrderFromPlan(planStr);
+    final String planStr = getDrillRelPlanInString(sql, SqlExplainLevel.EXPPLAN_ATTRIBUTES, Depth.LOGICAL);
+    final String prefixJoinOrder = getLogicalPrefixJoinOrderFromPlan(planStr);
     System.out.println(" prefix Join order = \n" + prefixJoinOrder);
-    for (String substr : expectedSubstrs) {
-      assertTrue(String.format("Expected string %s is not in the prefixJoinOrder %s!", substr, prefixJoinOrder), prefixJoinOrder.contains(substr));
+    for (final String substr : expectedSubstrs) {
+      assertTrue(String.format("Expected string %s is not in the prefixJoinOrder %s!", substr, prefixJoinOrder),
+          prefixJoinOrder.contains(substr));
     }
   }
 
@@ -172,12 +173,12 @@ public class PlanTestBase extends BaseTestQuery {
    * string.
    */
   public static void testRelPhysicalJoinOrder(String sql, String... expectedSubstrs) throws Exception {
-    String planStr = getDrillRelPlanInString(sql, SqlExplainLevel.EXPPLAN_ATTRIBUTES, Depth.PHYSICAL);
-
-    String prefixJoinOrder = getPhysicalPrefixJoinOrderFromPlan(planStr);
+    final String planStr = getDrillRelPlanInString(sql, SqlExplainLevel.EXPPLAN_ATTRIBUTES, Depth.PHYSICAL);
+    final String prefixJoinOrder = getPhysicalPrefixJoinOrderFromPlan(planStr);
     System.out.println(" prefix Join order = \n" + prefixJoinOrder);
-    for (String substr : expectedSubstrs) {
-      assertTrue(String.format("Expected string %s is not in the prefixJoinOrder %s!", substr, prefixJoinOrder), prefixJoinOrder.contains(substr));
+    for (final String substr : expectedSubstrs) {
+      assertTrue(String.format("Expected string %s is not in the prefixJoinOrder %s!", substr, prefixJoinOrder),
+          prefixJoinOrder.contains(substr));
     }
   }
 
@@ -190,8 +191,7 @@ public class PlanTestBase extends BaseTestQuery {
   public static void testRelPhysicalPlanLevDigest(String sql, String... expectedSubstrs)
       throws Exception {
     final String planStr = getDrillRelPlanInString(sql, SqlExplainLevel.DIGEST_ATTRIBUTES, Depth.PHYSICAL);
-
-    for (String substr : expectedSubstrs) {
+    for (final String substr : expectedSubstrs) {
       assertTrue(planStr.contains(substr));
     }
   }
@@ -207,7 +207,7 @@ public class PlanTestBase extends BaseTestQuery {
     final String planStr = getDrillRelPlanInString(sql,
         SqlExplainLevel.DIGEST_ATTRIBUTES, Depth.LOGICAL);
 
-    for (String substr : expectedSubstrs) {
+    for (final String substr : expectedSubstrs) {
       assertTrue(planStr.contains(substr));
     }
   }
@@ -221,7 +221,7 @@ public class PlanTestBase extends BaseTestQuery {
   public static void testRelPhysicalPlanLevExplain(String sql, String... expectedSubstrs) throws Exception {
     final String planStr = getDrillRelPlanInString(sql, SqlExplainLevel.EXPPLAN_ATTRIBUTES, Depth.PHYSICAL);
 
-    for (String substr : expectedSubstrs) {
+    for (final String substr : expectedSubstrs) {
       assertTrue(planStr.contains(substr));
     }
   }
@@ -235,7 +235,7 @@ public class PlanTestBase extends BaseTestQuery {
   public static void testRelLogicalPlanLevExplain(String sql, String... expectedSubstrs) throws Exception {
     final String planStr = getDrillRelPlanInString(sql, SqlExplainLevel.EXPPLAN_ATTRIBUTES, Depth.LOGICAL);
 
-    for (String substr : expectedSubstrs) {
+    for (final String substr : expectedSubstrs) {
       assertTrue(planStr.contains(substr));
     }
   }
@@ -288,32 +288,30 @@ public class PlanTestBase extends BaseTestQuery {
    */
   protected static String getPlanInString(String sql, String columnName)
       throws Exception {
-    List<QueryDataBatch> results = testSqlWithResults(sql);
-
-    RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
-    StringBuilder builder = new StringBuilder();
+    final List<QueryDataBatch> results = testSqlWithResults(sql);
+    final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
+    final StringBuilder builder = new StringBuilder();
 
-    for (QueryDataBatch b : results) {
+    for (final QueryDataBatch b : results) {
       if (!b.hasData()) {
         continue;
       }
 
       loader.load(b.getHeader().getDef(), b.getData());
 
-      VectorWrapper<?> vw;
+      final VectorWrapper<?> vw;
       try {
           vw = loader.getValueAccessorById(
-              NullableVarCharVector.class, //
-              loader.getValueVectorId(SchemaPath.getSimplePath(columnName)).getFieldIds() //
-              );
+              NullableVarCharVector.class,
+              loader.getValueVectorId(SchemaPath.getSimplePath(columnName)).getFieldIds());
       } catch (Throwable t) {
         throw new Exception("Looks like you did not provide an explain plan query, please add EXPLAIN PLAN FOR to the beginning of your query.");
       }
 
       System.out.println(vw.getValueVector().getField().toExpr());
-      ValueVector vv = vw.getValueVector();
+      final ValueVector vv = vw.getValueVector();
       for (int i = 0; i < vv.getAccessor().getValueCount(); i++) {
-        Object o = vv.getAccessor().getObject(i);
+        final Object o = vv.getAccessor().getObject(i);
         builder.append(o);
         System.out.println(vv.getAccessor().getObject(i));
       }
@@ -333,14 +331,12 @@ public class PlanTestBase extends BaseTestQuery {
   }
 
   private static String getPrefixJoinOrderFromPlan(String plan, String joinKeyWord, String scanKeyWord) {
-    StringBuilder builder = new StringBuilder();
-
+    final StringBuilder builder = new StringBuilder();
     final String[] planLines = plan.split("\n");
     int cnt = 0;
-
     final Stack<Integer> s = new Stack<>();
 
-    for (String line : planLines) {
+    for (final String line : planLines) {
       if (line.trim().isEmpty()) {
         continue;
       }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
index bc2d929..345ac3c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/bytecode/ReplaceMethodInvoke.java
@@ -37,7 +37,7 @@ import com.google.common.io.Files;
 import com.google.common.io.Resources;
 
 public class ReplaceMethodInvoke {
-//  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReplaceMethodInvoke.class);
+  // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReplaceMethodInvoke.class);
 
   public static void main(String[] args) throws Exception {
     final String k2 = "org/apache/drill/Pickle.class";
@@ -74,7 +74,10 @@ public class ReplaceMethodInvoke {
     final PrintWriter pw = new PrintWriter(sw);
     DrillCheckClassAdapter.verify(new ClassReader(cw.toByteArray()), false, pw);
 
-    assert sw.toString().length() == 0 : sw.toString();
+    final String checkString = sw.toString();
+    if (!checkString.isEmpty()) {
+      throw new IllegalStateException(checkString);
+    }
   }
 
   private static ClassWriter writer() {

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
index 2a83a53..4d2ad02 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
@@ -57,14 +57,14 @@ import org.junit.Test;
 import com.google.common.collect.Lists;
 
 public class ExpressionInterpreterTest  extends PopUnitTestBase {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExpressionInterpreterTest.class);
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExpressionInterpreterTest.class);
 
   @Test
   public void interpreterNullableStrExpr() throws Exception {
-    String[] colNames = {"col1"};
-    TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
-    String expressionStr =  "substr(col1, 1, 3)";
-    String[] expectedFirstTwoValues = {"aaa", "null"};
+    final String[] colNames = {"col1"};
+    final TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
+    final String expressionStr =  "substr(col1, 1, 3)";
+    final String[] expectedFirstTwoValues = {"aaa", "null"};
 
     doTest(expressionStr, colNames, colTypes, expectedFirstTwoValues);
   }
@@ -72,10 +72,10 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
 
   @Test
   public void interpreterNullableBooleanExpr() throws Exception {
-    String[] colNames = {"col1"};
-    TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
-    String expressionStr =  "col1 < 'abc' and col1 > 'abc'";
-    String[] expectedFirstTwoValues = {"false", "null"};
+    final String[] colNames = {"col1"};
+    final TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
+    final String expressionStr =  "col1 < 'abc' and col1 > 'abc'";
+    final String[] expectedFirstTwoValues = {"false", "null"};
 
     doTest(expressionStr, colNames, colTypes, expectedFirstTwoValues);
   }
@@ -83,67 +83,67 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
 
   @Test
   public void interpreterNullableIntegerExpr() throws Exception {
-    String[] colNames = {"col1"};
-    TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.INT)};
-    String expressionStr = "col1 + 100 - 1 * 2 + 2";
-    String[] expectedFirstTwoValues = {"-2147483548", "null"};
+    final String[] colNames = {"col1"};
+    final TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.INT)};
+    final String expressionStr = "col1 + 100 - 1 * 2 + 2";
+    final String[] expectedFirstTwoValues = {"-2147483548", "null"};
 
     doTest(expressionStr, colNames, colTypes, expectedFirstTwoValues);
   }
 
   @Test
   public void interpreterLikeExpr() throws Exception {
-    String[] colNames = {"col1"};
-    TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
-    String expressionStr =  "like(col1, 'aaa%')";
-    String[] expectedFirstTwoValues = {"true", "null"};
+    final String[] colNames = {"col1"};
+    final TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
+    final String expressionStr =  "like(col1, 'aaa%')";
+    final String[] expectedFirstTwoValues = {"true", "null"};
 
     doTest(expressionStr, colNames, colTypes, expectedFirstTwoValues);
   }
 
   @Test
   public void interpreterCastExpr() throws Exception {
-    String[] colNames = {"col1"};
-    TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
-    String expressionStr =  "cast(3+4 as float8)";
-    String[] expectedFirstTwoValues = {"7.0", "7.0"};
+    final String[] colNames = {"col1"};
+    final TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
+    final String expressionStr =  "cast(3+4 as float8)";
+    final String[] expectedFirstTwoValues = {"7.0", "7.0"};
 
     doTest(expressionStr, colNames, colTypes, expectedFirstTwoValues);
   }
 
   @Test
   public void interpreterCaseExpr() throws Exception {
-    String[] colNames = {"col1"};
-    TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
-    String expressionStr =  "case when substr(col1, 1, 3)='aaa' then 'ABC' else 'XYZ' end";
-    String[] expectedFirstTwoValues = {"ABC", "XYZ"};
+    final String[] colNames = {"col1"};
+    final TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.VARCHAR)};
+    final String expressionStr =  "case when substr(col1, 1, 3)='aaa' then 'ABC' else 'XYZ' end";
+    final String[] expectedFirstTwoValues = {"ABC", "XYZ"};
 
     doTest(expressionStr, colNames, colTypes, expectedFirstTwoValues);
   }
 
   @Test
   public void interpreterDateTest() throws Exception {
-    String[] colNames = {"col1"};
-    TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.INT)};
-    String expressionStr = "now()";
-    BitControl.PlanFragment planFragment = BitControl.PlanFragment.getDefaultInstance();
-    QueryContextInformation queryContextInfo = planFragment.getContext();
-    int                        timeZoneIndex = queryContextInfo.getTimeZone();
-    org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex));
-    org.joda.time.DateTime     now = new org.joda.time.DateTime(queryContextInfo.getQueryStartTime(), timeZone);
+    final String[] colNames = {"col1"};
+    final TypeProtos.MajorType[] colTypes = {Types.optional(TypeProtos.MinorType.INT)};
+    final String expressionStr = "now()";
+    final BitControl.PlanFragment planFragment = BitControl.PlanFragment.getDefaultInstance();
+    final QueryContextInformation queryContextInfo = planFragment.getContext();
+    final int                        timeZoneIndex = queryContextInfo.getTimeZone();
+    final org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex));
+    final org.joda.time.DateTime     now = new org.joda.time.DateTime(queryContextInfo.getQueryStartTime(), timeZone);
 
-    long queryStartDate = now.getMillis();
+    final long queryStartDate = now.getMillis();
 
-    TimeStampHolder out = new TimeStampHolder();
+    final TimeStampHolder out = new TimeStampHolder();
 
     out.value = queryStartDate;
 
-    ByteBuffer buffer = ByteBuffer.allocate(12);
+    final ByteBuffer buffer = ByteBuffer.allocate(12);
     buffer.putLong(out.value);
-    long l = buffer.getLong(0);
-    DateTime t = new DateTime(l);
+    final long l = buffer.getLong(0);
+    final DateTime t = new DateTime(l);
 
-    String[] expectedFirstTwoValues = {t.toString(), t.toString()};
+    final String[] expectedFirstTwoValues = {t.toString(), t.toString()};
 
     doTest(expressionStr, colNames, colTypes, expectedFirstTwoValues, planFragment);
   }
@@ -154,32 +154,31 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
   }
 
   protected void doTest(String expressionStr, String[] colNames, TypeProtos.MajorType[] colTypes, String[] expectFirstTwoValues, BitControl.PlanFragment planFragment) throws Exception {
-    RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
-
-    Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+    final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+    final Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
 
     bit1.run();
 
     // Create a mock scan batch as input for evaluation.
-    assert(colNames.length == colTypes.length);
+    assertEquals(colNames.length, colTypes.length);
 
-    MockGroupScanPOP.MockColumn[] columns = new MockGroupScanPOP.MockColumn[colNames.length];
+    final MockGroupScanPOP.MockColumn[] columns = new MockGroupScanPOP.MockColumn[colNames.length];
 
     for (int i = 0; i < colNames.length; i++ ) {
       columns[i] = new MockGroupScanPOP.MockColumn(colNames[i], colTypes[i].getMinorType(), colTypes[i].getMode(),0,0,0);
     }
 
-    MockGroupScanPOP.MockScanEntry entry = new MockGroupScanPOP.MockScanEntry(10, columns);
-    MockSubScanPOP scanPOP = new MockSubScanPOP("testTable", java.util.Collections.singletonList(entry));
+    final MockGroupScanPOP.MockScanEntry entry = new MockGroupScanPOP.MockScanEntry(10, columns);
+    final MockSubScanPOP scanPOP = new MockSubScanPOP("testTable", java.util.Collections.singletonList(entry));
 
-    ScanBatch batch = createMockScanBatch(bit1, scanPOP, planFragment);
+    final ScanBatch batch = createMockScanBatch(bit1, scanPOP, planFragment);
 
     batch.next();
 
-    ValueVector vv = evalExprWithInterpreter(expressionStr, batch, bit1);
+    final ValueVector vv = evalExprWithInterpreter(expressionStr, batch, bit1);
 
     // Verify the first 2 values in the output of evaluation.
-    assert(expectFirstTwoValues.length == 2);
+    assertEquals(2, expectFirstTwoValues.length);
     assertEquals(expectFirstTwoValues[0], getValueFromVector(vv, 0));
     assertEquals(expectFirstTwoValues[1], getValueFromVector(vv, 1));
 
@@ -191,13 +190,13 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
     bit1.close();
   }
 
-
   private ScanBatch createMockScanBatch(Drillbit bit, MockSubScanPOP scanPOP, BitControl.PlanFragment planFragment) {
-    List<RecordBatch> children = Lists.newArrayList();
-    MockScanBatchCreator creator = new MockScanBatchCreator();
+    final List<RecordBatch> children = Lists.newArrayList();
+    final MockScanBatchCreator creator = new MockScanBatchCreator();
 
     try {
-      FragmentContext context = new FragmentContext(bit.getContext(), planFragment, null, bit.getContext().getFunctionImplementationRegistry());
+      final FragmentContext context =
+          new FragmentContext(bit.getContext(), planFragment, null, bit.getContext().getFunctionImplementationRegistry());
       return creator.getBatch(context,scanPOP, children);
     } catch (Exception ex) {
       throw new DrillRuntimeException("Error when setup fragment context" + ex);
@@ -205,28 +204,26 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
   }
 
   private LogicalExpression parseExpr(String expr) throws RecognitionException {
-    ExprLexer lexer = new ExprLexer(new ANTLRStringStream(expr));
-    CommonTokenStream tokens = new CommonTokenStream(lexer);
-    ExprParser parser = new ExprParser(tokens);
-    ExprParser.parse_return ret = parser.parse();
+    final ExprLexer lexer = new ExprLexer(new ANTLRStringStream(expr));
+    final CommonTokenStream tokens = new CommonTokenStream(lexer);
+    final ExprParser parser = new ExprParser(tokens);
+    final ExprParser.parse_return ret = parser.parse();
     return ret.e;
   }
 
   private ValueVector evalExprWithInterpreter(String expression, RecordBatch batch, Drillbit bit) throws Exception {
-    LogicalExpression expr = parseExpr(expression);
-    ErrorCollector error = new ErrorCollectorImpl();
-    LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(expr, batch, error, bit.getContext().getFunctionImplementationRegistry());
+    final LogicalExpression expr = parseExpr(expression);
+    final ErrorCollector error = new ErrorCollectorImpl();
+    final LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(expr, batch, error, bit.getContext().getFunctionImplementationRegistry());
     if (error.getErrorCount() != 0) {
       logger.error("Failure while materializing expression [{}].  Errors: {}", expression, error);
       assertEquals(0, error.getErrorCount());
     }
 
     final MaterializedField outputField = MaterializedField.create("outCol", materializedExpr.getMajorType());
-
-    ValueVector vector = TypeHelper.getNewVector(outputField, bit.getContext().getAllocator());
+    final ValueVector vector = TypeHelper.getNewVector(outputField, bit.getContext().getAllocator());
 
     vector.allocateNewSafe();
-
     InterpreterEvaluator.evaluate(batch, vector, materializedExpr);
 
     return vector;
@@ -234,8 +231,8 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
 
   private void showValueVectorContent(ValueVector vw) {
     for (int row = 0; row < vw.getAccessor().getValueCount(); row ++ ) {
-      Object o = vw.getAccessor().getObject(row);
-      String cellString;
+      final Object o = vw.getAccessor().getObject(row);
+      final String cellString;
       if (o instanceof byte[]) {
         cellString = DrillStringUtils.toBinaryString((byte[]) o);
       } else {
@@ -246,8 +243,8 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
   }
 
   private String getValueFromVector(ValueVector vw, int index) {
-    Object o = vw.getAccessor().getObject(index);
-    String cellString;
+    final Object o = vw.getAccessor().getObject(index);
+    final String cellString;
     if (o instanceof byte[]) {
       cellString = DrillStringUtils.toBinaryString((byte[]) o);
     } else {
@@ -255,5 +252,4 @@ public class ExpressionInterpreterTest  extends PopUnitTestBase {
     }
     return cellString;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
index 0122c08..f57d7a9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
@@ -38,38 +38,38 @@ import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 
 public class TestMergingReceiver extends PopUnitTestBase {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMergingReceiver.class);
+  // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMergingReceiver.class);
 
   @Test
   public void twoBitTwoExchange() throws Exception {
-    RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+    final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
 
-    try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
-        Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
-        DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
+    try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+        final Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
+        final DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
       bit1.run();
       bit2.run();
       client.connect();
-      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
+      final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
         Files.toString(FileUtils.getResourceAsFile("/mergerecv/merging_receiver.json"),
           Charsets.UTF_8));
       int count = 0;
-      RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
+      final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
       // print the results
-      for (QueryDataBatch b : results) {
+      for (final QueryDataBatch b : results) {
         count += b.getHeader().getRowCount();
         for (int valueIdx = 0; valueIdx < b.getHeader().getRowCount(); valueIdx++) {
-          List<Object> row = Lists.newArrayList();
+          final List<Object> row = Lists.newArrayList();
           batchLoader.load(b.getHeader().getDef(), b.getData());
-          for (VectorWrapper<?> vw : batchLoader) {
+          for (final VectorWrapper<?> vw : batchLoader) {
             row.add(vw.getValueVector().getField().toExpr() + ":" + vw.getValueVector().getAccessor().getObject(valueIdx));
           }
-          for (Object cell : row) {
+          for (final Object cell : row) {
             if (cell == null) {
 //              System.out.print("<null>    ");
               continue;
             }
-            int len = cell.toString().length();
+            final int len = cell.toString().length();
 //            System.out.print(cell + " ");
             for (int i = 0; i < (30 - len); ++i) {
 //              System.out.print(" ");
@@ -86,31 +86,32 @@ public class TestMergingReceiver extends PopUnitTestBase {
 
   @Test
   public void testMultipleProvidersMixedSizes() throws Exception {
-    RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+    final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
 
-    try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
-        Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
-        DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
+    try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+        final Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
+        final DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
 
       bit1.run();
       bit2.run();
       client.connect();
-      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                                                        Files.toString(FileUtils.getResourceAsFile("/mergerecv/multiple_providers.json"),
-                                                                        Charsets.UTF_8));
+      final List<QueryDataBatch> results =
+          client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
+              Files.toString(FileUtils.getResourceAsFile("/mergerecv/multiple_providers.json"),
+                  Charsets.UTF_8));
       int count = 0;
-      RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
+      final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
       // print the results
       Long lastBlueValue = null;
-      for (QueryDataBatch b : results) {
+      for (final QueryDataBatch b : results) {
         count += b.getHeader().getRowCount();
         for (int valueIdx = 0; valueIdx < b.getHeader().getRowCount(); valueIdx++) {
-          List<Object> row = Lists.newArrayList();
+          final List<Object> row = Lists.newArrayList();
           batchLoader.load(b.getHeader().getDef(), b.getData());
-          for (VectorWrapper vw : batchLoader) {
+          for (final VectorWrapper vw : batchLoader) {
             row.add(vw.getValueVector().getField().toExpr() + ":" + vw.getValueVector().getAccessor().getObject(valueIdx));
             if (vw.getValueVector().getField().getAsSchemaPath().getRootSegment().getPath().equals("blue")) {
-              // assert order is ascending
+              // check that order is ascending
               if (((Long)vw.getValueVector().getAccessor().getObject(valueIdx)).longValue() == 0) {
                 continue; // ignore initial 0's from sort
               }
@@ -120,7 +121,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
               lastBlueValue = (Long)vw.getValueVector().getAccessor().getObject(valueIdx);
             }
           }
-          for (Object cell : row) {
+          for (final Object cell : row) {
             int len = cell.toString().length();
 //            System.out.print(cell + " ");
             for (int i = 0; i < (30 - len); ++i) {
@@ -138,30 +139,31 @@ public class TestMergingReceiver extends PopUnitTestBase {
 
   @Test
   public void handleEmptyBatch() throws Exception {
-    RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+    final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
 
-    try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
-        Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
-        DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
+    try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+        final Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
+        final DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());) {
 
       bit1.run();
       bit2.run();
       client.connect();
-      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                                                        Files.toString(FileUtils.getResourceAsFile("/mergerecv/empty_batch.json"),
-                                                                        Charsets.UTF_8));
+      final List<QueryDataBatch> results =
+          client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
+              Files.toString(FileUtils.getResourceAsFile("/mergerecv/empty_batch.json"),
+                  Charsets.UTF_8));
       int count = 0;
-      RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
+      final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
       // print the results
-      for (QueryDataBatch b : results) {
+      for (final QueryDataBatch b : results) {
         count += b.getHeader().getRowCount();
         for (int valueIdx = 0; valueIdx < b.getHeader().getRowCount(); valueIdx++) {
-          List<Object> row = Lists.newArrayList();
+          final List<Object> row = Lists.newArrayList();
           batchLoader.load(b.getHeader().getDef(), b.getData());
-          for (VectorWrapper vw : batchLoader) {
+          for (final VectorWrapper vw : batchLoader) {
             row.add(vw.getValueVector().getField().toExpr() + ":" + vw.getValueVector().getAccessor().getObject(valueIdx));
           }
-          for (Object cell : row) {
+          for (final Object cell : row) {
             if (cell == null) {
 //              System.out.print("<null>    ");
               continue;
@@ -180,5 +182,4 @@ public class TestMergingReceiver extends PopUnitTestBase {
       assertEquals(100000, count);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
index f37624a..d51a017 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.sort;
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import mockit.Injectable;
 import mockit.NonStrictExpectations;
@@ -51,14 +52,11 @@ import com.google.common.io.Files;
 
 @Ignore
 public class TestSimpleSort extends ExecTest {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestSimpleSort.class);
-  DrillConfig c = DrillConfig.create();
-
+  // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestSimpleSort.class);
+  private final DrillConfig c = DrillConfig.create();
 
   @Test
   public void sortOneKeyAscending(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable{
-
-
     new NonStrictExpectations(){{
       bitContext.getMetrics(); result = new MetricRegistry();
       bitContext.getAllocator(); result = new TopLevelAllocator();
@@ -67,39 +65,35 @@ public class TestSimpleSort extends ExecTest {
       bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
     }};
 
-
-    PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
-    PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/sort/one_key_sort.json"), Charsets.UTF_8));
-    FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
-    FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
-    SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
+    final PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/sort/one_key_sort.json"), Charsets.UTF_8));
+    final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
+    final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
+    final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
 
     int previousInt = Integer.MIN_VALUE;
-
     int recordCount = 0;
     int batchCount = 0;
 
-    while(exec.next()){
+    while(exec.next()) {
       batchCount++;
-      IntVector c1 = exec.getValueVectorById(new SchemaPath("blue", ExpressionPosition.UNKNOWN), IntVector.class);
-      IntVector c2 = exec.getValueVectorById(new SchemaPath("green", ExpressionPosition.UNKNOWN), IntVector.class);
+      final IntVector c1 = exec.getValueVectorById(new SchemaPath("blue", ExpressionPosition.UNKNOWN), IntVector.class);
+      final IntVector c2 = exec.getValueVectorById(new SchemaPath("green", ExpressionPosition.UNKNOWN), IntVector.class);
 
-      IntVector.Accessor a1 = c1.getAccessor();
-      IntVector.Accessor a2 = c2.getAccessor();
+      final IntVector.Accessor a1 = c1.getAccessor();
+      final IntVector.Accessor a2 = c2.getAccessor();
 
-      for(int i =0; i < c1.getAccessor().getValueCount(); i++){
+      for(int i =0; i < c1.getAccessor().getValueCount(); i++) {
         recordCount++;
-        assert previousInt <= a1.get(i);
+        assertTrue(previousInt <= a1.get(i));
         previousInt = a1.get(i);
-        assert previousInt == a2.get(i);
+        assertEquals(previousInt, a2.get(i));
       }
-
-
     }
 
     System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
 
-    if(context.getFailureCause() != null){
+    if(context.getFailureCause() != null) {
       throw context.getFailureCause();
     }
     assertTrue(!context.isFailed());
@@ -107,8 +101,6 @@ public class TestSimpleSort extends ExecTest {
 
   @Test
   public void sortTwoKeysOneAscendingOneDescending(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable{
-
-
     new NonStrictExpectations(){{
       bitContext.getMetrics(); result = new MetricRegistry();
       bitContext.getAllocator(); result = new TopLevelAllocator();
@@ -117,11 +109,10 @@ public class TestSimpleSort extends ExecTest {
       bitContext.getCompiler(); result = CodeCompiler.getTestCompiler(c);
     }};
 
-
-    PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
-    PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/sort/two_key_sort.json"), Charsets.UTF_8));
-    FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
-    FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
+    final PhysicalPlanReader reader = new PhysicalPlanReader(c, c.getMapper(), CoordinationProtos.DrillbitEndpoint.getDefaultInstance());
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/sort/two_key_sort.json"), Charsets.UTF_8));
+    final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
+    final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
 
     int previousInt = Integer.MIN_VALUE;
@@ -130,38 +121,33 @@ public class TestSimpleSort extends ExecTest {
     int recordCount = 0;
     int batchCount = 0;
 
-    while(exec.next()){
+    while(exec.next()) {
       batchCount++;
-      IntVector c1 = exec.getValueVectorById(new SchemaPath("blue", ExpressionPosition.UNKNOWN), IntVector.class);
-      BigIntVector c2 = exec.getValueVectorById(new SchemaPath("alt", ExpressionPosition.UNKNOWN), BigIntVector.class);
+      final IntVector c1 = exec.getValueVectorById(new SchemaPath("blue", ExpressionPosition.UNKNOWN), IntVector.class);
+      final BigIntVector c2 = exec.getValueVectorById(new SchemaPath("alt", ExpressionPosition.UNKNOWN), BigIntVector.class);
 
-      IntVector.Accessor a1 = c1.getAccessor();
-      BigIntVector.Accessor a2 = c2.getAccessor();
+      final IntVector.Accessor a1 = c1.getAccessor();
+      final BigIntVector.Accessor a2 = c2.getAccessor();
 
-      for(int i =0; i < c1.getAccessor().getValueCount(); i++){
+      for(int i =0; i < c1.getAccessor().getValueCount(); i++) {
         recordCount++;
-        assert previousInt <= a1.get(i);
+        assertTrue(previousInt <= a1.get(i));
 
-        if(previousInt != a1.get(i)){
+        if(previousInt != a1.get(i)) {
           previousLong = Long.MAX_VALUE;
           previousInt = a1.get(i);
         }
 
-        assert previousLong >= a2.get(i);
-
+        assertTrue(previousLong >= a2.get(i));
         //System.out.println(previousInt + "\t" + a2.get(i));
-
       }
-
-
     }
 
     System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
 
-    if(context.getFailureCause() != null){
+    if(context.getFailureCause() != null) {
       throw context.getFailureCause();
     }
     assertTrue(!context.isFailed());
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestInfoSchemaFilterPushDown.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestInfoSchemaFilterPushDown.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestInfoSchemaFilterPushDown.java
index b6e789b..f0d216b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestInfoSchemaFilterPushDown.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/ischema/TestInfoSchemaFilterPushDown.java
@@ -17,6 +17,9 @@
  */
 package org.apache.drill.exec.store.ischema;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import org.apache.drill.PlanTestBase;
 import org.junit.Test;
 
@@ -24,34 +27,34 @@ public class TestInfoSchemaFilterPushDown extends PlanTestBase {
 
   @Test
   public void testFilterPushdown_Equal() throws Exception {
-    String query = "SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_SCHEMA='INFORMATION_SCHEMA'";
-    String scan = "Scan(groupscan=[TABLES, filter=equal(Field=TABLE_SCHEMA,Literal=INFORMATION_SCHEMA)])";
+    final String query = "SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_SCHEMA='INFORMATION_SCHEMA'";
+    final String scan = "Scan(groupscan=[TABLES, filter=equal(Field=TABLE_SCHEMA,Literal=INFORMATION_SCHEMA)])";
 
     testHelper(query, scan, false);
   }
 
   @Test
   public void testFilterPushdown_NonEqual() throws Exception {
-    String query = "SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_SCHEMA <> 'INFORMATION_SCHEMA'";
-    String scan = "Scan(groupscan=[TABLES, filter=not_equal(Field=TABLE_SCHEMA,Literal=INFORMATION_SCHEMA)])";
+    final String query = "SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_SCHEMA <> 'INFORMATION_SCHEMA'";
+    final String scan = "Scan(groupscan=[TABLES, filter=not_equal(Field=TABLE_SCHEMA,Literal=INFORMATION_SCHEMA)])";
 
     testHelper(query, scan, false);
   }
 
   @Test
   public void testFilterPushdown_Like() throws Exception {
-    String query = "SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_SCHEMA LIKE '%SCH%'";
-    String scan = "Scan(groupscan=[TABLES, filter=like(Field=TABLE_SCHEMA,Literal=%SCH%)])";
+    final String query = "SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_SCHEMA LIKE '%SCH%'";
+    final String scan = "Scan(groupscan=[TABLES, filter=like(Field=TABLE_SCHEMA,Literal=%SCH%)])";
 
     testHelper(query, scan, false);
   }
 
   @Test
   public void testFilterPushdown_And() throws Exception {
-    String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " +
+    final String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " +
         "TABLE_SCHEMA = 'sys' AND " +
         "TABLE_NAME <> 'version'";
-    String scan = "Scan(groupscan=[COLUMNS, filter=booleanand(equal(Field=TABLE_SCHEMA,Literal=sys)," +
+    final String scan = "Scan(groupscan=[COLUMNS, filter=booleanand(equal(Field=TABLE_SCHEMA,Literal=sys)," +
         "not_equal(Field=TABLE_NAME,Literal=version))])";
 
     testHelper(query, scan, false);
@@ -59,11 +62,11 @@ public class TestInfoSchemaFilterPushDown extends PlanTestBase {
 
   @Test
   public void testFilterPushdown_Or() throws Exception {
-    String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " +
+    final String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " +
         "TABLE_SCHEMA = 'sys' OR " +
         "TABLE_NAME <> 'version' OR " +
         "TABLE_SCHEMA like '%sdfgjk%'";
-    String scan = "Scan(groupscan=[COLUMNS, filter=booleanor(equal(Field=TABLE_SCHEMA,Literal=sys)," +
+    final String scan = "Scan(groupscan=[COLUMNS, filter=booleanor(equal(Field=TABLE_SCHEMA,Literal=sys)," +
         "not_equal(Field=TABLE_NAME,Literal=version),like(Field=TABLE_SCHEMA,Literal=%sdfgjk%))])";
 
     testHelper(query, scan, false);
@@ -71,49 +74,49 @@ public class TestInfoSchemaFilterPushDown extends PlanTestBase {
 
   @Test
   public void testFilterPushDownWithProject_Equal() throws Exception {
-    String query = "SELECT COLUMN_NAME from INFORMATION_SCHEMA.`COLUMNS` WHERE TABLE_SCHEMA = 'INFORMATION_SCHEMA'";
-    String scan = "Scan(groupscan=[COLUMNS, filter=equal(Field=TABLE_SCHEMA,Literal=INFORMATION_SCHEMA)])";
+    final String query = "SELECT COLUMN_NAME from INFORMATION_SCHEMA.`COLUMNS` WHERE TABLE_SCHEMA = 'INFORMATION_SCHEMA'";
+    final String scan = "Scan(groupscan=[COLUMNS, filter=equal(Field=TABLE_SCHEMA,Literal=INFORMATION_SCHEMA)])";
     testHelper(query, scan, false);
   }
 
   @Test
   public void testFilterPushDownWithProject_NotEqual() throws Exception {
-    String query = "SELECT COLUMN_NAME from INFORMATION_SCHEMA.`COLUMNS` WHERE TABLE_NAME <> 'TABLES'";
-    String scan = "Scan(groupscan=[COLUMNS, filter=not_equal(Field=TABLE_NAME,Literal=TABLES)])";
+    final String query = "SELECT COLUMN_NAME from INFORMATION_SCHEMA.`COLUMNS` WHERE TABLE_NAME <> 'TABLES'";
+    final String scan = "Scan(groupscan=[COLUMNS, filter=not_equal(Field=TABLE_NAME,Literal=TABLES)])";
     testHelper(query, scan, false);
   }
 
   @Test
   public void testFilterPushDownWithProject_Like() throws Exception {
-    String query = "SELECT COLUMN_NAME from INFORMATION_SCHEMA.`COLUMNS` WHERE TABLE_NAME LIKE '%BL%'";
-    String scan = "Scan(groupscan=[COLUMNS, filter=like(Field=TABLE_NAME,Literal=%BL%)])";
+    final String query = "SELECT COLUMN_NAME from INFORMATION_SCHEMA.`COLUMNS` WHERE TABLE_NAME LIKE '%BL%'";
+    final String scan = "Scan(groupscan=[COLUMNS, filter=like(Field=TABLE_NAME,Literal=%BL%)])";
     testHelper(query, scan, false);
   }
 
   @Test
   public void testPartialFilterPushDownWithProject() throws Exception {
-    String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " +
+    final String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " +
         "TABLE_SCHEMA = 'sys' AND " +
         "TABLE_NAME = 'version' AND " +
         "COLUMN_NAME like 'commit%s'";
-    String scan = "Scan(groupscan=[COLUMNS, filter=booleanand(equal(Field=TABLE_SCHEMA,Literal=sys)," +
+    final String scan = "Scan(groupscan=[COLUMNS, filter=booleanand(equal(Field=TABLE_SCHEMA,Literal=sys)," +
         "equal(Field=TABLE_NAME,Literal=version))])";
 
     testHelper(query, scan, true);
   }
 
-  private void testHelper(String query, String filterInScan, boolean filterPrelExpected) throws Exception {
-    String plan = getPlanInString("EXPLAIN PLAN FOR " + query, OPTIQ_FORMAT);
+  private void testHelper(final String query, String filterInScan, boolean filterPrelExpected) throws Exception {
+    final String plan = getPlanInString("EXPLAIN PLAN FOR " + query, OPTIQ_FORMAT);
 
     if (!filterPrelExpected) {
       // If filter prel is not expected, make sure it is not in plan
-      assert !plan.contains("Filter(");
+      assertFalse(plan.contains("Filter("));
     } else {
-      assert plan.contains("Filter(");
+      assertTrue(plan.contains("Filter("));
     }
 
     // Check for filter pushed into scan.
-    assert plan.contains(filterInScan);
+    assertTrue(plan.contains(filterInScan));
 
     // run the query
     test(query);

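(For illustration — not part of the commit; the demo class and plan string below are hypothetical.) The testHelper() rewrite above works because the bare Java assert keyword compiles to a no-op unless the JVM is started with -ea, so a check like "assert plan.contains(...)" can silently pass on a wrong plan, while JUnit's assertTrue() is evaluated on every run:

    import static org.junit.Assert.assertTrue;

    public class AssertVsAssertTrueDemo {
      public static void main(String[] args) {
        final String plan = "Scan(groupscan=[COLUMNS])";  // stand-in plan text with no Filter(
        assert plan.contains("Filter(");       // no-op without -ea: passes silently
        assertTrue(plan.contains("Filter("));  // always evaluated: throws AssertionError here
      }
    }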
http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/FieldInfo.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/FieldInfo.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/FieldInfo.java
index 34f60ba..98313bc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/FieldInfo.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/FieldInfo.java
@@ -20,21 +20,27 @@ package org.apache.drill.exec.store.parquet;
 import org.apache.drill.common.types.TypeProtos;
 
 public class FieldInfo {
-  String parquetType;
-  String name;
-  int bitLength;
-  int numberOfPages;
-  Object[] values;
-  TypeProtos.MinorType type;
+  final String parquetType;
+  final String name;
+  final int bitLength;
+  final int numberOfPages;
+  final Object[] values;
+  final TypeProtos.MinorType type;
 
-  FieldInfo(String parquetType, String name, int bitLength, Object[] values, TypeProtos.MinorType type, ParquetTestProperties props){
+  FieldInfo(String parquetType, String name, int bitLength, Object[] values,
+      TypeProtos.MinorType type, ParquetTestProperties props){
     this.parquetType = parquetType;
     this.name = name;
     this.bitLength  = bitLength;
-    this.numberOfPages = Math.max(1, (int) Math.ceil( ((long) props.recordsPerRowGroup) * bitLength / 8.0 / props.bytesPerPage));
-    this.values = values;
+    this.numberOfPages = Math.max(1,
+        (int) Math.ceil( ((long) props.recordsPerRowGroup) * bitLength / 8.0 / props.bytesPerPage));
+
     // generator is designed to use 3 values
-    assert values.length == 3;
+    if (values.length != 3) {
+      throw new IllegalStateException("generator is designed to use 3 values");
+    }
+    this.values = values;
+
     this.type = type;
   }
 }

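The FieldInfo change above applies the other half of the pattern: in test-support code that is not itself a JUnit test, the assert is replaced by an explicit IllegalStateException so the precondition is enforced even with assertions disabled. A minimal sketch of the same guard in isolation (the class name is illustrative; Guava's Preconditions.checkState would be an equivalent alternative):

    final class ThreeValueField {  // illustrative stand-in for FieldInfo
      private final Object[] values;

      ThreeValueField(final Object[] values) {
        if (values.length != 3) {  // enforced with or without -ea
          throw new IllegalStateException("generator is designed to use 3 values");
        }
        this.values = values;
      }
    }

With the defaults visible elsewhere in this diff (300 records per row group, 1 MiB pages), the numberOfPages formula above evaluates for a 64-bit field as max(1, ceil(300 * 64 / 8.0 / 1048576)) = max(1, 1) = 1 page.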
http://git-wip-us.apache.org/repos/asf/drill/blob/6f54223e/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
index 8fdaa72..61380cf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
@@ -81,20 +81,21 @@ import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 
 @Ignore
-public class ParquetRecordReaderTest extends BaseTestQuery{
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetRecordReaderTest.class);
+public class ParquetRecordReaderTest extends BaseTestQuery {
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetRecordReaderTest.class);
 
-  static boolean VERBOSE_DEBUG = false;
+  static final boolean VERBOSE_DEBUG = false;
 
-  static final int numberRowGroups = 1;
-  static final int recordsPerRowGroup = 300;
-  static int DEFAULT_BYTES_PER_PAGE = 1024 * 1024 * 1;
-  static final String fileName = "/tmp/parquet_test_file_many_types";
+  private static final int numberRowGroups = 1;
+  private static final int recordsPerRowGroup = 300;
+  private static int DEFAULT_BYTES_PER_PAGE = 1024 * 1024 * 1;
+  private static final String fileName = "/tmp/parquet_test_file_many_types";
 
   @BeforeClass
-  public static void generateFile() throws Exception{
-    File f = new File(fileName);
-    ParquetTestProperties props = new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, new HashMap<String, FieldInfo>());
+  public static void generateFile() throws Exception {
+    final File f = new File(fileName);
+    final ParquetTestProperties props =
+        new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, new HashMap<String, FieldInfo>());
     populateFieldInfoMap(props);
     if (!f.exists()) {
       TestFileGenerator.generateParquetFile(fileName, props);
@@ -104,7 +105,7 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 
   @Test
   public void testMultipleRowGroupsAndReads3() throws Exception {
-    String planName = "/parquet/parquet_scan_screen.json";
+    final String planName = "/parquet/parquet_scan_screen.json";
     testParquetFullEngineLocalPath(planName, fileName, 2, numberRowGroups, recordsPerRowGroup);
   }
 
@@ -115,45 +116,43 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 
   @Test
   public void testMultipleRowGroupsAndReads2() throws Exception {
-    String readEntries;
-    readEntries = "";
+    final StringBuilder readEntries = new StringBuilder();
     // number of times to read the file
     int i = 3;
     for (int j = 0; j < i; j++) {
-      readEntries += "\""+fileName+"\"";
+      readEntries.append('"');
+      readEntries.append(fileName);
+      readEntries.append('"');
       if (j < i - 1) {
-        readEntries += ",";
+        readEntries.append(',');
       }
     }
-    String planText = Files.toString(FileUtils.getResourceAsFile("/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8).replaceFirst( "&REPLACED_IN_PARQUET_TEST&", readEntries);
+
+    final String planText = Files.toString(FileUtils.getResourceAsFile(
+        "/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8).replaceFirst(
+            "&REPLACED_IN_PARQUET_TEST&", readEntries.toString());
     testParquetFullEngineLocalText(planText, fileName, i, numberRowGroups, recordsPerRowGroup, true);
   }
 
   @Test
 
   public void testDictionaryError() throws Exception {
-    String readEntries;
-    readEntries = "\"/tmp/lineitem_null_dict.parquet\"";
-
-    String planText = Files.toString(FileUtils.getResourceAsFile("/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8).replaceFirst( "&REPLACED_IN_PARQUET_TEST&", readEntries);
-    //testParquetFullEngineLocalText(planText, fileName, 1, 1, 100000, false);
-
     testFull(QueryType.SQL, "select L_RECEIPTDATE from dfs.`/tmp/lineitem_null_dict.parquet`", "", 1, 1, 100000, false);
   }
 
   @Test
   public void testNullableAgg() throws Exception {
-
-    List<QueryDataBatch> result = testSqlWithResults("select sum(a) as total_sum from dfs.`/tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
+    final List<QueryDataBatch> result = testSqlWithResults(
+        "select sum(a) as total_sum from dfs.`/tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
     assertEquals("Only expected one batch with data, and then the empty finishing batch.", 2, result.size());
-    RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
+    final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
 
-    QueryDataBatch b = result.get(0);
+    final QueryDataBatch b = result.get(0);
     loader.load(b.getHeader().getDef(), b.getData());
 
-    VectorWrapper vw = loader.getValueAccessorById(
-        NullableBigIntVector.class, //
-        loader.getValueVectorId(SchemaPath.getCompoundPath("total_sum")).getFieldIds() //
+    final VectorWrapper vw = loader.getValueAccessorById(
+        NullableBigIntVector.class,
+        loader.getValueVectorId(SchemaPath.getCompoundPath("total_sum")).getFieldIds()
     );
     assertEquals(4999950000l, vw.getValueVector().getAccessor().getObject(0));
     b.release();
@@ -162,16 +161,17 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 
   @Test
   public void testNullableFilter() throws Exception {
-    List<QueryDataBatch> result = testSqlWithResults("select count(wr_return_quantity) as row_count from dfs.`/tmp/web_returns` where wr_return_quantity = 1");
+    final List<QueryDataBatch> result = testSqlWithResults(
+        "select count(wr_return_quantity) as row_count from dfs.`/tmp/web_returns` where wr_return_quantity = 1");
     assertEquals("Only expected one batch with data, and then the empty finishing batch.", 2, result.size());
-    RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
+    final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
 
-    QueryDataBatch b = result.get(0);
+    final QueryDataBatch b = result.get(0);
     loader.load(b.getHeader().getDef(), b.getData());
 
-    VectorWrapper vw = loader.getValueAccessorById(
-        BigIntVector.class, //
-        loader.getValueVectorId(SchemaPath.getCompoundPath("row_count")).getFieldIds() //
+    final VectorWrapper vw = loader.getValueAccessorById(
+        BigIntVector.class,
+        loader.getValueVectorId(SchemaPath.getCompoundPath("row_count")).getFieldIds()
     );
     assertEquals(3573l, vw.getValueVector().getAccessor().getObject(0));
     b.release();
@@ -181,20 +181,23 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 
   @Test
   public void testFixedBinary() throws Exception {
-    String readEntries = "\"/tmp/drilltest/fixed_binary.parquet\"";
-
-    String planText = Files.toString(FileUtils.getResourceAsFile("/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8).replaceFirst( "&REPLACED_IN_PARQUET_TEST&", readEntries);
+    final String readEntries = "\"/tmp/drilltest/fixed_binary.parquet\"";
+    final String planText = Files.toString(FileUtils.getResourceAsFile(
+        "/parquet/parquet_scan_screen_read_entry_replace.json"), Charsets.UTF_8)
+          .replaceFirst( "&REPLACED_IN_PARQUET_TEST&", readEntries);
     testParquetFullEngineLocalText(planText, fileName, 1, 1, 1000000, false);
   }
 
   @Test
   public void testNonNullableDictionaries() throws Exception {
-    testFull(QueryType.SQL, "select * from dfs.`/tmp/drilltest/non_nullable_dictionary.parquet`", "", 1, 1, 30000000, false);
+    testFull(QueryType.SQL,
+        "select * from dfs.`/tmp/drilltest/non_nullable_dictionary.parquet`", "", 1, 1, 30000000, false);
   }
 
   @Test
   public void testNullableVarCharMemory() throws Exception {
-    testFull(QueryType.SQL, "select s_comment,s_suppkey from dfs.`/tmp/sf100_supplier.parquet`", "", 1, 1, 1000, false);
+    testFull(QueryType.SQL,
+        "select s_comment,s_suppkey from dfs.`/tmp/sf100_supplier.parquet`", "", 1, 1, 1000, false);
   }
 
   @Test
@@ -205,88 +208,99 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
   @Test
   public void testDrill_1314() throws Exception {
     testFull(QueryType.SQL, "select l_partkey " +
-        "from dfs.`/tmp/drill_1314.parquet`", "", 1,1, 10000, false);
+        "from dfs.`/tmp/drill_1314.parquet`", "", 1, 1, 10000, false);
   }
 
   @Test
   public void testDrill_1314_all_columns() throws Exception {
-    testFull(QueryType.SQL, "select * " +
-        "from dfs.`/tmp/drill_1314.parquet`", "", 1,1, 10000, false);
+    testFull(QueryType.SQL, "select * from dfs.`/tmp/drill_1314.parquet`", "", 1, 1, 10000, false);
   }
 
   @Test
   public void testDictionaryError_419() throws Exception {
-    testFull(QueryType.SQL, "select c_address from dfs.`/tmp/customer_snappyimpala_drill_419.parquet`", "", 1, 1, 150000, false);
+    testFull(QueryType.SQL,
+        "select c_address from dfs.`/tmp/customer_snappyimpala_drill_419.parquet`", "", 1, 1, 150000, false);
   }
 
   @Test
   public void testNonExistentColumn() throws Exception {
-    testFull(QueryType.SQL, "select non_existent_column from cp.`tpch/nation.parquet`", "", 1, 1, 150000, false);
+    testFull(QueryType.SQL,
+        "select non_existent_column from cp.`tpch/nation.parquet`", "", 1, 1, 150000, false);
   }
 
 
   @Test
 
   public void testNonExistentColumnLargeFile() throws Exception {
-    testFull(QueryType.SQL, "select non_existent_column, non_existent_col_2 from dfs.`/tmp/customer.dict.parquet`", "", 1, 1, 150000, false);
+    testFull(QueryType.SQL,
+        "select non_existent_column, non_existent_col_2 from dfs.`/tmp/customer.dict.parquet`", "", 1, 1, 150000, false);
   }
 
   @Test
 
   public void testNonExistentColumnsSomePresentColumnsLargeFile() throws Exception {
-    testFull(QueryType.SQL, "select cust_key, address,  non_existent_column, non_existent_col_2 from dfs.`/tmp/customer.dict.parquet`", "", 1, 1, 150000, false);
+    testFull(QueryType.SQL,
+        "select cust_key, address,  non_existent_column, non_existent_col_2 from dfs.`/tmp/customer.dict.parquet`",
+        "", 1, 1, 150000, false);
   }
 
   @Ignore // ignored for now for performance
   @Test
   public void testTPCHPerformace_SF1() throws Exception {
-    testFull(QueryType.SQL, "select * from dfs.`/tmp/orders_part-m-00001.parquet`", "", 1, 1, 150000, false);
+    testFull(QueryType.SQL,
+        "select * from dfs.`/tmp/orders_part-m-00001.parquet`", "", 1, 1, 150000, false);
   }
 
   @Test
   public void testLocalDistributed() throws Exception {
-    String planName = "/parquet/parquet_scan_union_screen_physical.json";
+    final String planName = "/parquet/parquet_scan_union_screen_physical.json";
     testParquetFullEngineLocalTextDistributed(planName, fileName, 1, numberRowGroups, recordsPerRowGroup);
   }
 
   @Test
   @Ignore
   public void testRemoteDistributed() throws Exception {
-    String planName = "/parquet/parquet_scan_union_screen_physical.json";
+    final String planName = "/parquet/parquet_scan_union_screen_physical.json";
     testParquetFullEngineRemote(planName, fileName, 1, numberRowGroups, recordsPerRowGroup);
   }
 
 
-  public void testParquetFullEngineLocalPath(String planFileName, String filename, int numberOfTimesRead /* specified in json plan */, int numberOfRowGroups, int recordsPerRowGroup) throws Exception{
+  public void testParquetFullEngineLocalPath(String planFileName, String filename,
+      int numberOfTimesRead /* specified in json plan */,
+      int numberOfRowGroups, int recordsPerRowGroup) throws Exception {
     testParquetFullEngineLocalText(Files.toString(FileUtils.getResourceAsFile(planFileName), Charsets.UTF_8), filename,
         numberOfTimesRead, numberOfRowGroups, recordsPerRowGroup, true);
   }
 
   // specific tests should call this method, but it is not marked as a test itself intentionally
-  public void testParquetFullEngineLocalText(String planText, String filename, int numberOfTimesRead /* specified in json plan */,
-                                             int numberOfRowGroups, int recordsPerRowGroup, boolean testValues) throws Exception{
+  public void testParquetFullEngineLocalText(String planText, String filename,
+      int numberOfTimesRead /* specified in json plan */,
+      int numberOfRowGroups, int recordsPerRowGroup, boolean testValues) throws Exception {
     testFull(QueryType.LOGICAL, planText, filename, numberOfTimesRead, numberOfRowGroups, recordsPerRowGroup, testValues);
   }
 
-  private void testFull(QueryType type, String planText, String filename, int numberOfTimesRead /* specified in json plan */,
-                        int numberOfRowGroups, int recordsPerRowGroup, boolean testValues) throws Exception{
+  private void testFull(QueryType type, String planText, String filename,
+      int numberOfTimesRead /* specified in json plan */,
+      int numberOfRowGroups, int recordsPerRowGroup, boolean testValues) throws Exception {
 
-//    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-    HashMap<String, FieldInfo> fields = new HashMap<>();
-    ParquetTestProperties props = new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, fields);
+    // final RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
+    final HashMap<String, FieldInfo> fields = new HashMap<>();
+    final ParquetTestProperties props =
+        new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, fields);
     TestFileGenerator.populateFieldInfoMap(props);
-    ParquetResultListener resultListener = new ParquetResultListener(getAllocator(), props, numberOfTimesRead, testValues);
-    Stopwatch watch = new Stopwatch().start();
+    final ParquetResultListener resultListener =
+        new ParquetResultListener(getAllocator(), props, numberOfTimesRead, testValues);
+    final Stopwatch watch = new Stopwatch().start();
     testWithListener(type, planText, resultListener);
     resultListener.getResults();
-//    batchLoader.clear();
+    // batchLoader.clear();
     System.out.println(String.format("Took %d ms to run query", watch.elapsed(TimeUnit.MILLISECONDS)));
-
   }
 
-
   // use this method to submit a physical plan
-  public void testParquetFullEngineLocalTextDistributed(String planName, String filename, int numberOfTimesRead /* specified in json plan */, int numberOfRowGroups, int recordsPerRowGroup) throws Exception{
+  public void testParquetFullEngineLocalTextDistributed(String planName, String filename,
+      int numberOfTimesRead /* specified in json plan */,
+      int numberOfRowGroups, int recordsPerRowGroup) throws Exception {
     String planText = Files.toString(FileUtils.getResourceAsFile(planName), Charsets.UTF_8);
     testFull(QueryType.PHYSICAL, planText, filename, numberOfTimesRead, numberOfRowGroups, recordsPerRowGroup, true);
   }
@@ -296,7 +310,7 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
   }
 
   public String pad(String value, int length, String with) {
-    StringBuilder result = new StringBuilder(length);
+    final StringBuilder result = new StringBuilder(length);
     result.append(value);
 
     while (result.length() < length) {
@@ -306,19 +320,22 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
     return result.toString();
   }
 
-  public void testParquetFullEngineRemote(String plan, String filename, int numberOfTimesRead /* specified in json plan */, int numberOfRowGroups, int recordsPerRowGroup) throws Exception{
-    HashMap<String, FieldInfo> fields = new HashMap<>();
-    ParquetTestProperties props = new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, fields);
+  public void testParquetFullEngineRemote(String plan, String filename,
+      int numberOfTimesRead /* specified in json plan */,
+      int numberOfRowGroups, int recordsPerRowGroup) throws Exception {
+    final HashMap<String, FieldInfo> fields = new HashMap<>();
+    final ParquetTestProperties props =
+        new ParquetTestProperties(numberRowGroups, recordsPerRowGroup, DEFAULT_BYTES_PER_PAGE, fields);
     TestFileGenerator.populateFieldInfoMap(props);
-    ParquetResultListener resultListener = new ParquetResultListener(getAllocator(), props, numberOfTimesRead, true);
+    final ParquetResultListener resultListener =
+        new ParquetResultListener(getAllocator(), props, numberOfTimesRead, true);
     testWithListener(QueryType.PHYSICAL, Files.toString(FileUtils.getResourceAsFile(plan), Charsets.UTF_8), resultListener);
     resultListener.getResults();
   }
 
-  class MockOutputMutator implements OutputMutator {
-    List<MaterializedField> removedFields = Lists.newArrayList();
-    List<ValueVector> addFields = Lists.newArrayList();
-
+  private static class MockOutputMutator implements OutputMutator {
+    private final List<MaterializedField> removedFields = Lists.newArrayList();
+    private final List<ValueVector> addFields = Lists.newArrayList();
 
     List<MaterializedField> getRemovedFields() {
       return removedFields;
@@ -335,7 +352,6 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 
     @Override
     public void allocate(int recordCount) {
-
     }
 
     @Override
@@ -349,7 +365,6 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
     }
   }
 
-
   private void validateFooters(final List<Footer> metadata) {
     logger.debug(metadata.toString());
     assertEquals(3, metadata.size());
@@ -512,7 +527,7 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
         "unused, no file is generated", 1, props, QueryType.LOGICAL);
   }
 
-  // requires binary file generated by pig from TPCH data, also have to disable assert where data is coming in
+  // requires a binary file generated by pig from TPCH data; also requires disabling the assertion where data is coming in
 
   @Test
   public void testMultipleRowGroupsAndReadsPigError() throws Exception {
@@ -533,7 +548,7 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 
   @Test
   public void test958_sql() throws Exception {
-//    testFull(QueryType.SQL, "select ss_ext_sales_price from dfs.`/tmp/store_sales`", "", 1, 1, 30000000, false);
+    // testFull(QueryType.SQL, "select ss_ext_sales_price from dfs.`/tmp/store_sales`", "", 1, 1, 30000000, false);
     testFull(QueryType.SQL, "select * from dfs.`/tmp/store_sales`", "", 1, 1, 30000000, false);
   }
 
@@ -581,7 +596,7 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
     testParquetFullEngineEventBased(true, false, "/parquet/parquet_selective_column_read.json", null, "/tmp/test.parquet", 1, props, QueryType.PHYSICAL);
   }
 
-  public static void main(String[] args) throws Exception{
+  public static void main(String[] args) throws Exception {
     // TODO - not sure why this has a main method, test below can be run directly
     //new ParquetRecordReaderTest().testPerformance();
   }
@@ -601,16 +616,16 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 //    };
 
     final String fileName = "/tmp/parquet_test_performance.parquet";
-    HashMap<String, FieldInfo> fields = new HashMap<>();
-    ParquetTestProperties props = new ParquetTestProperties(1, 20 * 1000 * 1000, DEFAULT_BYTES_PER_PAGE, fields);
+    final HashMap<String, FieldInfo> fields = new HashMap<>();
+    final ParquetTestProperties props = new ParquetTestProperties(1, 20 * 1000 * 1000, DEFAULT_BYTES_PER_PAGE, fields);
     populateFieldInfoMap(props);
     //generateParquetFile(fileName, props);
 
-    Configuration dfsConfig = new Configuration();
-    List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, new Path(fileName));
-    Footer f = footers.iterator().next();
+    final Configuration dfsConfig = new Configuration();
+    final List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, new Path(fileName));
+    final Footer f = footers.iterator().next();
 
-    List<SchemaPath> columns = Lists.newArrayList();
+    final List<SchemaPath> columns = Lists.newArrayList();
     columns.add(new SchemaPath("_MAP.integer", ExpressionPosition.UNKNOWN));
     columns.add(new SchemaPath("_MAP.bigInt", ExpressionPosition.UNKNOWN));
     columns.add(new SchemaPath("_MAP.f", ExpressionPosition.UNKNOWN));
@@ -620,14 +635,14 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
     columns.add(new SchemaPath("_MAP.bin2", ExpressionPosition.UNKNOWN));
     int totalRowCount = 0;
 
-    FileSystem fs = new CachedSingleFileSystem(fileName);
-    BufferAllocator allocator = new TopLevelAllocator();
+    final FileSystem fs = new CachedSingleFileSystem(fileName);
+    final BufferAllocator allocator = new TopLevelAllocator();
     for(int i = 0; i < 25; i++) {
-      ParquetRecordReader rr = new ParquetRecordReader(context, 256000, fileName, 0, fs,
+      final ParquetRecordReader rr = new ParquetRecordReader(context, 256000, fileName, 0, fs,
           new DirectCodecFactory(dfsConfig, allocator), f.getParquetMetadata(), columns);
-      TestOutputMutator mutator = new TestOutputMutator(allocator);
+      final TestOutputMutator mutator = new TestOutputMutator(allocator);
       rr.setup(null, mutator);
-      Stopwatch watch = new Stopwatch();
+      final Stopwatch watch = new Stopwatch();
       watch.start();
 
       int rowCount = 0;
@@ -644,36 +659,37 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
 
   // specific tests should call this method, but it is not marked as a test itself intentionally
   public void testParquetFullEngineEventBased(boolean generateNew, String plan, String readEntries, String filename,
-                                              int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props) throws Exception{
+      int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props) throws Exception {
    testParquetFullEngineEventBased(true, generateNew, plan, readEntries, filename,
-                                              numberOfTimesRead /* specified in json plan */, props, QueryType.LOGICAL);
+        numberOfTimesRead /* specified in json plan */, props, QueryType.LOGICAL);
   }
 
 
   // specific tests should call this method, but it is not marked as a test itself intentionally
-  public void testParquetFullEngineEventBased(boolean generateNew, String plan, String filename, int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props) throws Exception{
+  public void testParquetFullEngineEventBased(boolean generateNew, String plan, String filename,
+      int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props) throws Exception {
     testParquetFullEngineEventBased(true, generateNew, plan, null, filename, numberOfTimesRead, props, QueryType.LOGICAL);
   }
 
   // specific tests should call this method, but it is not marked as a test itself intentionally
-  public void testParquetFullEngineEventBased(boolean testValues, boolean generateNew, String plan, String readEntries, String filename,
-                                              int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props,
-                                              QueryType queryType) throws Exception{
+  public void testParquetFullEngineEventBased(boolean testValues, boolean generateNew, String plan,
+      String readEntries, String filename,
+      int numberOfTimesRead /* specified in json plan */, ParquetTestProperties props,
+      QueryType queryType) throws Exception {
     if (generateNew) {
       TestFileGenerator.generateParquetFile(filename, props);
     }
 
-    ParquetResultListener resultListener = new ParquetResultListener(getAllocator(), props, numberOfTimesRead, testValues);
-    long C = System.nanoTime();
+    final ParquetResultListener resultListener = new ParquetResultListener(getAllocator(), props, numberOfTimesRead, testValues);
+    final long C = System.nanoTime();
     String planText = Files.toString(FileUtils.getResourceAsFile(plan), Charsets.UTF_8);
     // substitute in the string for the read entries, allows reuse of the plan file for several tests
     if (readEntries != null) {
       planText = planText.replaceFirst( "&REPLACED_IN_PARQUET_TEST&", readEntries);
     }
-    this.testWithListener(queryType, planText, resultListener);
+    testWithListener(queryType, planText, resultListener);
     resultListener.getResults();
-    long D = System.nanoTime();
+    final long D = System.nanoTime();
     System.out.println(String.format("Took %f s to run query", (float)(D-C) / 1E9));
   }
-
 }
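
The readEntries loop in testMultipleRowGroupsAndReads2 above builds a quoted, comma-separated list of the same path, once per planned read. A standalone sketch of that construction (the helper method name is not in the commit):

    // Illustrative helper; buildReadEntries("/tmp/f", 3) yields "/tmp/f","/tmp/f","/tmp/f"
    static String buildReadEntries(final String fileName, final int times) {
      final StringBuilder sb = new StringBuilder();
      for (int j = 0; j < times; j++) {
        sb.append('"').append(fileName).append('"');
        if (j < times - 1) {
          sb.append(',');
        }
      }
      return sb.toString();
    }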