You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@drill.apache.org by GitBox <gi...@apache.org> on 2018/08/11 11:52:12 UTC

[GitHub] asfgit closed pull request #1415: DRILL-6656: Disallow extra semicolons in import statements.

asfgit closed pull request #1415: DRILL-6656: Disallow extra semicolons in import statements.
URL: https://github.com/apache/drill/pull/1415
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (it would not otherwise be visible, since GitHub hides the original diff once a fork-based pull request is merged):

diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java
index e66a340f1f8..799f6918c4e 100644
--- a/common/src/main/java/org/apache/drill/common/types/Types.java
+++ b/common/src/main/java/org/apache/drill/common/types/Types.java
@@ -57,7 +57,7 @@ public static boolean isComplex(final MajorType type) {
   }
 
   public static boolean isRepeated(final MajorType type) {
-    return type.getMode() == REPEATED ;
+    return type.getMode() == REPEATED;
   }
 
   public static boolean isNumericType(final MajorType type) {
diff --git a/common/src/main/java/org/apache/drill/common/util/DrillStringUtils.java b/common/src/main/java/org/apache/drill/common/util/DrillStringUtils.java
index bc3b01b3ef1..cf4b4c8c158 100644
--- a/common/src/main/java/org/apache/drill/common/util/DrillStringUtils.java
+++ b/common/src/main/java/org/apache/drill/common/util/DrillStringUtils.java
@@ -117,7 +117,7 @@ public static String escapeSql(String str) {
    */
   public static String toBinaryString(ByteBuf buf, int strStart, int strEnd) {
     StringBuilder result = new StringBuilder();
-    for (int i = strStart; i < strEnd ; ++i) {
+    for (int i = strStart; i < strEnd; ++i) {
       appendByte(result, buf.getByte(i));
     }
     return result.toString();
diff --git a/common/src/test/java/org/apache/drill/test/DrillAssert.java b/common/src/test/java/org/apache/drill/test/DrillAssert.java
index 0feca06d2ac..5683d2933eb 100644
--- a/common/src/test/java/org/apache/drill/test/DrillAssert.java
+++ b/common/src/test/java/org/apache/drill/test/DrillAssert.java
@@ -36,13 +36,16 @@ public static void assertMultiLineStringEquals(String message, String expected,
         ch1 = expected.charAt(idx1);
         ch2 = actual.charAt(idx2);
         if (isNewLineChar(ch1)) {
-          idx1++; continue;
+          idx1++;
+          continue;
         } else if (isNewLineChar(ch2)) {
-          idx2++; continue;
+          idx2++;
+          continue;
         } else if (ch1 != ch2) {
           break outside;
         } else {
-          idx1++; idx2++;
+          idx1++;
+          idx2++;
         }
       }
       // skip newlines at the end
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java
index 6e1efe512c9..e73b512725f 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java
@@ -292,7 +292,7 @@ private HBaseScanSpec createHBaseScanSpec(FunctionCall call, CompareFunctionsPro
             startRow = prefix.getBytes(Charsets.UTF_8);
             stopRow = startRow.clone();
             boolean isMaxVal = true;
-            for (int i = stopRow.length - 1; i >= 0 ; --i) {
+            for (int i = stopRow.length - 1; i >= 0; --i) {
               int nextByteValue = (0xff & stopRow[i]) + 1;
               if (nextByteValue < 0xff) {
                 stopRow[i] = (byte) nextByteValue;
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java
index 632f0465993..1128f3991db 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java
@@ -125,7 +125,7 @@ protected void doPushFilterToScan(final RelOptRuleCall call, final FilterPrel fi
     final ScanPrel newScanPrel = ScanPrel.create(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
 
     // Depending on whether is a project in the middle, assign either scan or copy of project to childRel.
-    final RelNode childRel = project == null ? newScanPrel : project.copy(project.getTraitSet(), ImmutableList.of((RelNode)newScanPrel));;
+    final RelNode childRel = project == null ? newScanPrel : project.copy(project.getTraitSet(), ImmutableList.of(newScanPrel));
 
     if (hbaseFilterBuilder.isAllExpressionsConverted()) {
         /*
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseConnectionManager.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseConnectionManager.java
index 36224b305c2..3227277c512 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseConnectionManager.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseConnectionManager.java
@@ -31,8 +31,8 @@ public void testHBaseConnectionManager() throws Exception{
     runHBaseSQLVerifyCount("SELECT\n"
         + "row_key\n"
         + "FROM\n"
-        + "  hbase.`[TABLE_NAME]` tableName"
-        , 8);
+        + "  hbase.`[TABLE_NAME]` tableName",
+        8);
 
     /*
      * Simulate HBase connection close and ensure that the connection
@@ -42,8 +42,8 @@ public void testHBaseConnectionManager() throws Exception{
     runHBaseSQLVerifyCount("SELECT\n"
         + "row_key\n"
         + "FROM\n"
-        + "  hbase.`[TABLE_NAME]` tableName"
-        , 8);
+        + "  hbase.`[TABLE_NAME]` tableName",
+        8);
 
     /*
      * Simulate HBase cluster restart and ensure that running query against
@@ -54,8 +54,8 @@ public void testHBaseConnectionManager() throws Exception{
     runHBaseSQLVerifyCount("SELECT\n"
         + "row_key\n"
         + "FROM\n"
-        + "  hbase.`[TABLE_NAME]` tableName"
-        , 8);
+        + "  hbase.`[TABLE_NAME]` tableName",
+        8);
 
   }
 
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
index aee18eb7047..cccaeb1c0c6 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
@@ -91,8 +91,8 @@ public void testFilterPushDownCompositeDateRowKey1() throws Exception {
         + " FROM hbase.`TestTableCompositeDate` tableName\n"
         + " WHERE\n"
         + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') < DATE '2015-06-18' AND\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') > DATE '2015-06-13'"
-        , 12);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') > DATE '2015-06-13'",
+        12);
   }
 
   @Test
@@ -104,8 +104,8 @@ public void testFilterPushDownCompositeDateRowKey2() throws Exception {
         + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n"
         + " FROM hbase.`TestTableCompositeDate` tableName\n"
         + " WHERE\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') = DATE '2015-08-22'"
-        , 3);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') = DATE '2015-08-22'",
+        3);
   }
 
   @Test
@@ -118,8 +118,8 @@ public void testFilterPushDownCompositeDateRowKey3() throws Exception {
         + " FROM hbase.`TestTableCompositeDate` tableName\n"
         + " WHERE\n"
         + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') < DATE '2015-06-18' AND\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') > DATE '2015-06-13'"
-        , 1);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') > DATE '2015-06-13'",
+        1);
   }
 
   @Test
@@ -133,8 +133,8 @@ public void testFilterPushDownCompositeDateRowKey4() throws Exception {
         + " FROM hbase.`TestTableCompositeDate` tableName\n"
         + " WHERE\n"
         + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'timestamp_epoch_be') >= TIMESTAMP '2015-06-18 08:00:00.000' AND\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'timestamp_epoch_be') < TIMESTAMP '2015-06-20 16:00:00.000'"
-        , 7);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'timestamp_epoch_be') < TIMESTAMP '2015-06-20 16:00:00.000'",
+        7);
   }
 
   @Test
@@ -146,8 +146,8 @@ public void testFilterPushDownCompositeTimeRowKey1() throws Exception {
         + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n"
         + " FROM hbase.`TestTableCompositeTime` tableName\n"
         + " WHERE\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') = TIME '23:57:15.275'"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n"
-        , 1);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') = TIME '23:57:15.275'",//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n"
+        1);
   }
 
   @Test
@@ -159,8 +159,9 @@ public void testFilterPushDownCompositeTimeRowKey2() throws Exception {
         + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n"
         + " FROM hbase.`TestTableCompositeTime` tableName\n"
         + " WHERE\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') = TIME '23:55:51.250'"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n"
-        , 1);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') = TIME '23:55:51.250'",//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'),
+        // 'BIGINT_BE') \n"
+        1);
   }
 
   @Test
@@ -173,8 +174,8 @@ public void testFilterPushDownCompositeTimeRowKey3() throws Exception {
         + " FROM hbase.`TestTableCompositeTime` tableName\n"
         + " WHERE\n"
         + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') > TIME '23:57:06' AND"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') < TIME '23:59:59'"
-        , 8);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') < TIME '23:59:59'",
+        8);
   }
 
   @Test
@@ -186,8 +187,9 @@ public void testFilterPushDownCompositeBigIntRowKey1() throws Exception {
         + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n"
         + " FROM hbase.`TestTableCompositeDate` tableName\n"
         + " WHERE\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'bigint_be') = cast(1409040000000 as bigint)"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n"
-        , 1);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'bigint_be') = cast(1409040000000 as bigint)",//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'),
+        // 'BIGINT_BE') \n"
+        1);
   }
 
   @Test
@@ -202,8 +204,8 @@ public void testFilterPushDownCompositeBigIntRowKey2() throws Exception {
         + " FROM hbase.`TestTableCompositeDate` tableName\n"
         + " WHERE\n"
         + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'uint8_be') > cast(1438300800000 as bigint) AND\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'uint8_be') < cast(1438617600000 as bigint)"
-        , 10);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'uint8_be') < cast(1438617600000 as bigint)",
+        10);
   }
 
   @Test
@@ -216,8 +218,8 @@ public void testFilterPushDownCompositeIntRowKey1() throws Exception {
         + " FROM hbase.`TestTableCompositeInt` tableName\n"
         + " WHERE\n"
         + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') >= cast(423 as int) AND"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') < cast(940 as int)"
-        , 11);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') < cast(940 as int)",
+        11);
   }
 
   @Test
@@ -230,8 +232,8 @@ public void testFilterPushDownCompositeIntRowKey2() throws Exception {
         + " FROM hbase.`TestTableCompositeInt` tableName\n"
         + " WHERE\n"
         + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') >= cast(300 as int) AND"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') < cast(900 as int)"
-        , 1);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') < cast(900 as int)",
+        1);
   }
 
   @Test
@@ -243,8 +245,8 @@ public void testFilterPushDownCompositeIntRowKey3() throws Exception {
         + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n"
         + " FROM hbase.`TestTableCompositeInt` tableName\n"
         + " WHERE\n"
-        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') = cast(658 as int)"
-        , 1);
+        + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') = cast(658 as int)",
+        1);
   }
 
   @Test
@@ -256,8 +258,8 @@ public void testFilterPushDownDoubleOB() throws Exception {
         + "FROM\n"
         + "  hbase.`TestTableDoubleOB` t\n"
         + "WHERE\n"
-        + "  CONVERT_FROM(row_key, 'DOUBLE_OB') > cast(95.54 as DOUBLE)"
-        , 6);
+        + "  CONVERT_FROM(row_key, 'DOUBLE_OB') > cast(95.54 as DOUBLE)",
+        6);
   }
 
   @Test
@@ -270,8 +272,8 @@ public void testFilterPushDownDoubleOBPlan() throws Exception {
         + "FROM\n"
         + "  hbase.`TestTableDoubleOB` t\n"
         + "WHERE\n"
-        + "  CONVERT_FROM(row_key, 'DOUBLE_OB') > cast(95.54 as DOUBLE)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'DOUBLE_OB') > cast(95.54 as DOUBLE)",
+        1);
   }
 
   @Test
@@ -283,8 +285,8 @@ public void testFilterPushDownDoubleOBDesc() throws Exception {
         + "FROM\n"
         + "  hbase.`TestTableDoubleOBDesc` t\n"
         + "WHERE\n"
-        + "  CONVERT_FROM(row_key, 'DOUBLE_OBD') > cast(95.54 as DOUBLE)"
-        , 6);
+        + "  CONVERT_FROM(row_key, 'DOUBLE_OBD') > cast(95.54 as DOUBLE)",
+        6);
   }
 
   @Test
@@ -297,8 +299,8 @@ public void testFilterPushDownDoubleOBDescPlan() throws Exception {
         + "FROM\n"
         + "  hbase.`TestTableDoubleOBDesc` t\n"
         + "WHERE\n"
-        + "  CONVERT_FROM(row_key, 'DOUBLE_OBD') > cast(95.54 as DOUBLE)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'DOUBLE_OBD') > cast(95.54 as DOUBLE)",
+        1);
   }
 
   @Test
@@ -311,8 +313,8 @@ public void testFilterPushDownIntOB() throws Exception {
         + "  hbase.`TestTableIntOB` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'INT_OB') >= cast(-32 as INT) AND"
-        + "  CONVERT_FROM(row_key, 'INT_OB') < cast(59 as INT)"
-        , 91);
+        + "  CONVERT_FROM(row_key, 'INT_OB') < cast(59 as INT)",
+        91);
   }
 
   @Test
@@ -325,8 +327,8 @@ public void testFilterPushDownIntOBDesc() throws Exception {
         + "  hbase.`TestTableIntOBDesc` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'INT_OBD') >= cast(-32 as INT) AND"
-        + "  CONVERT_FROM(row_key, 'INT_OBD') < cast(59 as INT)"
-        , 91);
+        + "  CONVERT_FROM(row_key, 'INT_OBD') < cast(59 as INT)",
+        91);
   }
 
   @Test
@@ -340,8 +342,8 @@ public void testFilterPushDownIntOBPlan() throws Exception {
         + "  hbase.`TestTableIntOB` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'INT_OB') > cast(-23 as INT) AND"
-        + "  CONVERT_FROM(row_key, 'INT_OB') < cast(14 as INT)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'INT_OB') < cast(14 as INT)",
+        1);
   }
 
   @Test
@@ -355,8 +357,8 @@ public void testFilterPushDownIntOBDescPlan() throws Exception {
         + "  hbase.`TestTableIntOBDesc` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'INT_OBD') > cast(-23 as INT) AND"
-        + "  CONVERT_FROM(row_key, 'INT_OBD') < cast(14 as INT)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'INT_OBD') < cast(14 as INT)",
+        1);
   }
 
   @Test
@@ -369,8 +371,8 @@ public void testFilterPushDownBigIntOB() throws Exception {
         + "  hbase.`TestTableBigIntOB` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'BIGINT_OB') > cast(1438034423063 as BIGINT) AND"
-        + "  CONVERT_FROM(row_key, 'BIGINT_OB') <= cast(1438034423097 as BIGINT)"
-        , 34);
+        + "  CONVERT_FROM(row_key, 'BIGINT_OB') <= cast(1438034423097 as BIGINT)",
+        34);
   }
 
   @Test
@@ -384,8 +386,8 @@ public void testFilterPushDownBigIntOBPlan() throws Exception {
         + "  hbase.`TestTableBigIntOB` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'BIGINT_OB') > cast(1438034423063 as BIGINT) AND"
-        + "  CONVERT_FROM(row_key, 'BIGINT_OB') < cast(1438034423097 as BIGINT)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'BIGINT_OB') < cast(1438034423097 as BIGINT)",
+        1);
   }
 
   @Test
@@ -398,8 +400,8 @@ public void testFilterPushDownFloatOB() throws Exception {
         + "  hbase.`TestTableFloatOB` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'FLOAT_OB') > cast(95.74 as FLOAT) AND"
-        + "  CONVERT_FROM(row_key, 'FLOAT_OB') < cast(99.5 as FLOAT)"
-        , 5);
+        + "  CONVERT_FROM(row_key, 'FLOAT_OB') < cast(99.5 as FLOAT)",
+        5);
   }
 
   @Test
@@ -413,8 +415,8 @@ public void testFilterPushDownFloatOBPlan() throws Exception {
         + "  hbase.`TestTableFloatOB` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'FLOAT_OB') > cast(95.54 as FLOAT) AND"
-        + "  CONVERT_FROM(row_key, 'FLOAT_OB') < cast(99.77 as FLOAT)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'FLOAT_OB') < cast(99.77 as FLOAT)",
+        1);
   }
 
   @Test
@@ -427,8 +429,8 @@ public void testFilterPushDownBigIntOBDesc() throws Exception {
         + "  hbase.`TestTableBigIntOBDesc` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'BIGINT_OBD') > cast(1438034423063 as BIGINT) AND"
-        + "  CONVERT_FROM(row_key, 'BIGINT_OBD') <= cast(1438034423097 as BIGINT)"
-        , 34);
+        + "  CONVERT_FROM(row_key, 'BIGINT_OBD') <= cast(1438034423097 as BIGINT)",
+        34);
   }
 
   @Test
@@ -442,8 +444,8 @@ public void testFilterPushDownBigIntOBDescPlan() throws Exception {
         + "  hbase.`TestTableBigIntOBDesc` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'BIGINT_OBD') > cast(1438034423063 as BIGINT) AND"
-        + "  CONVERT_FROM(row_key, 'BIGINT_OBD') < cast(1438034423097 as BIGINT)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'BIGINT_OBD') < cast(1438034423097 as BIGINT)",
+        1);
   }
 
   @Test
@@ -456,8 +458,8 @@ public void testFilterPushDownFloatOBDesc() throws Exception {
         + "  hbase.`TestTableFloatOBDesc` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'FLOAT_OBD') > cast(95.74 as FLOAT) AND"
-        + "  CONVERT_FROM(row_key, 'FLOAT_OBD') < cast(99.5 as FLOAT)"
-        , 5);
+        + "  CONVERT_FROM(row_key, 'FLOAT_OBD') < cast(99.5 as FLOAT)",
+        5);
   }
 
   @Test
@@ -471,8 +473,8 @@ public void testFilterPushDownFloatOBDescPlan() throws Exception {
         + "  hbase.`TestTableFloatOBDesc` t\n"
         + "WHERE\n"
         + "  CONVERT_FROM(row_key, 'FLOAT_OBD') > cast(95.54 as FLOAT) AND"
-        + "  CONVERT_FROM(row_key, 'FLOAT_OBD') < cast(99.77 as FLOAT)"
-        , 1);
+        + "  CONVERT_FROM(row_key, 'FLOAT_OBD') < cast(99.77 as FLOAT)",
+        1);
   }
 
   @Test
@@ -680,8 +682,8 @@ public void testFilterPushDownConvertExpressionWithNumber() throws Exception {
         + "FROM\n"
         + "  hbase.`[TABLE_NAME]` tableName\n"
         + "WHERE\n"
-        + "  convert_from(row_key, 'INT_BE') = 75"
-        , 1);
+        + "  convert_from(row_key, 'INT_BE') = 75",
+        1);
   }
 
   @Test
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseProjectPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseProjectPushDown.java
index 28bf0361379..9c0c70768f7 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseProjectPushDown.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseProjectPushDown.java
@@ -31,8 +31,8 @@ public void testRowKeyPushDown() throws Exception{
     runHBaseSQLVerifyCount("SELECT\n"
         + "row_key\n"
         + "FROM\n"
-        + "  hbase.`[TABLE_NAME]` tableName"
-        , 8);
+        + "  hbase.`[TABLE_NAME]` tableName",
+        8);
   }
 
   @Test
@@ -41,8 +41,8 @@ public void testColumnWith1RowPushDown() throws Exception{
     runHBaseSQLVerifyCount("SELECT\n"
         + "t.f2.c7 as `t.f2.c7`\n"
         + "FROM\n"
-        + "  hbase.`[TABLE_NAME]` t"
-        , 1);
+        + "  hbase.`[TABLE_NAME]` t",
+        1);
   }
 
   @Test
@@ -55,8 +55,8 @@ public void testRowKeyAndColumnPushDown() throws Exception{
         + "row_key, t.f.c1 * 31 as `t dot f dot c1 * 31`, "
         + "t.f.c2 as `t dot f dot c2`, 5 as `5`, 'abc' as `'abc'`\n"
         + "FROM\n"
-        + "  hbase.`[TABLE_NAME]` t"
-        , 8);
+        + "  hbase.`[TABLE_NAME]` t",
+        8);
   }
 
   @Test
@@ -65,8 +65,8 @@ public void testColumnFamilyPushDown() throws Exception{
     runHBaseSQLVerifyCount("SELECT\n"
         + "row_key, f, f2\n"
         + "FROM\n"
-        + "  hbase.`[TABLE_NAME]` tableName"
-        , 8);
+        + "  hbase.`[TABLE_NAME]` tableName",
+        8);
   }
 
 }
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java
index 27882b59e70..e8fa925ebf3 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseQueries.java
@@ -54,8 +54,8 @@ public void testWithEmptyFirstAndLastRegion() throws Exception {
       setColumnWidths(new int[] {8, 15});
       runHBaseSQLVerifyCount("SELECT *\n"
           + "FROM\n"
-          + "  hbase.`" + tableName + "` tableName\n"
-          , 1);
+          + "  hbase.`" + tableName + "` tableName\n",
+          1);
     } finally {
       try {
         admin.disableTable(tableName);
@@ -78,8 +78,8 @@ public void testWithEmptyTable() throws Exception {
       setColumnWidths(new int[] {8, 15});
       runHBaseSQLVerifyCount("SELECT row_key, count(*)\n"
           + "FROM\n"
-          + "  hbase.`" + tableName + "` tableName GROUP BY row_key\n"
-          , 0);
+          + "  hbase.`" + tableName + "` tableName GROUP BY row_key\n",
+          0);
     } finally {
       try {
         admin.disableTable(tableName);
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseRegionScanAssignments.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseRegionScanAssignments.java
index 112eb87f4da..a414bb7758f 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseRegionScanAssignments.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseRegionScanAssignments.java
@@ -21,6 +21,7 @@
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.NavigableMap;
@@ -176,8 +177,9 @@ public void testHBaseGroupScanAssignmentSomeAfinedWithOrphans() throws Exception
     scan.applyAssignments(endpoints);
 
     LinkedList<Integer> sizes = Lists.newLinkedList();
-    sizes.add(1); sizes.add(1); sizes.add(1); sizes.add(1); sizes.add(1); sizes.add(1); sizes.add(1); sizes.add(1);
-    sizes.add(2); sizes.add(2); sizes.add(2); sizes.add(2); sizes.add(2);
+    Collections.addAll(sizes, 1, 1, 1, 1, 1, 1, 1, 1);
+    Collections.addAll(sizes, 2, 2, 2, 2, 2);
+
     for (int i = 0; i < endpoints.size(); i++) {
       assertTrue(sizes.remove((Integer)scan.getSpecificScan(i).getRegionScanSpecList().size()));
     }
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
index 26d1c440b35..3c650db4520 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
@@ -235,7 +235,8 @@ public static void generateHBaseDataset2(Connection conn, Admin admin, TableName
       for (int i = 0; i < numberRegions; i++) {
         Put p = new Put((""+rowKeyChar+iteration).getBytes());
         for (int j = 1; j <= numColumns; j++) {
-          bytes = new byte[5000]; random.nextBytes(bytes);
+          bytes = new byte[5000];
+          random.nextBytes(bytes);
           p.addColumn("f".getBytes(), ("c"+j).getBytes(), bytes);
         }
         table.mutate(p);
@@ -316,7 +317,7 @@ public static void generateHBaseDatasetCompositeKeyDate(Connection conn, Admin a
     long endTime    = startTime + MILLISECONDS_IN_A_YEAR;
     long interval   = MILLISECONDS_IN_A_DAY / 3;
 
-    for (long ts = startTime, counter = 0; ts < endTime; ts += interval, counter ++) {
+    for (long ts = startTime, counter = 0; ts < endTime; ts += interval, counter++) {
       byte[] rowKey = ByteBuffer.allocate(16) .putLong(ts).array();
 
       for(int i = 0; i < 8; ++i) {
@@ -356,7 +357,7 @@ public static void generateHBaseDatasetCompositeKeyTime(Connection conn, Admin a
     long largeInterval   = MILLISECONDS_IN_A_SEC * 42;
     long interval        = smallInterval;
 
-    for (long ts = startTime, counter = 0; ts < endTime; ts += interval, counter ++) {
+    for (long ts = startTime, counter = 0; ts < endTime; ts += interval, counter++) {
       byte[] rowKey = ByteBuffer.allocate(16) .putLong(ts).array();
 
       for(int i = 0; i < 8; ++i) {
@@ -398,7 +399,7 @@ public static void generateHBaseDatasetCompositeKeyInt(Connection conn, Admin ad
     int stopVal = 1000;
     int interval = 47;
     long counter = 0;
-    for (int i = startVal; i < stopVal; i += interval, counter ++) {
+    for (int i = startVal; i < stopVal; i += interval, counter++) {
       byte[] rowKey = ByteBuffer.allocate(12).putInt(i).array();
 
       for(int j = 0; j < 8; ++j) {
@@ -492,7 +493,7 @@ public static void generateHBaseDatasetBigIntOB(Connection conn, Admin admin, Ta
 
     BufferedMutator table = conn.getBufferedMutator(tableName);
     long startTime = (long)1438034423 * 1000;
-    for (long i = startTime; i <= startTime + 100; i ++) {
+    for (long i = startTime; i <= startTime + 100; i++) {
       byte[] bytes = new byte[9];
       PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 9);
       OrderedBytes.encodeInt64(br, i, Order.ASCENDING);
@@ -523,7 +524,7 @@ public static void generateHBaseDatasetIntOB(Connection conn, Admin admin, Table
 
     BufferedMutator table = conn.getBufferedMutator(tableName);
 
-    for (int i = -49; i <= 100; i ++) {
+    for (int i = -49; i <= 100; i++) {
       byte[] bytes = new byte[5];
       PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
       OrderedBytes.encodeInt32(br, i, Order.ASCENDING);
@@ -616,7 +617,7 @@ public static void generateHBaseDatasetBigIntOBDesc(Connection conn, Admin admin
 
     BufferedMutator table = conn.getBufferedMutator(tableName);
     long startTime = (long)1438034423 * 1000;
-    for (long i = startTime; i <= startTime + 100; i ++) {
+    for (long i = startTime; i <= startTime + 100; i++) {
       byte[] bytes = new byte[9];
       PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 9);
       OrderedBytes.encodeInt64(br, i, Order.DESCENDING);
@@ -648,7 +649,7 @@ public static void generateHBaseDatasetIntOBDesc(Connection conn, Admin admin, T
 
     BufferedMutator table = conn.getBufferedMutator(tableName);
 
-    for (int i = -49; i <= 100; i ++) {
+    for (int i = -49; i <= 100; i++) {
       byte[] bytes = new byte[5];
       PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
       OrderedBytes.encodeInt32(br, i, Order.DESCENDING);
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
index 6fc567ee196..2beb5394ff0 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
@@ -436,7 +436,7 @@ public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, fi
       storageHandler.configureInputJobProperties(tableDesc, table.getParameters());
       return (Class<? extends InputFormat<?, ?>>) storageHandler.getInputFormatClass();
     } else {
-      return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName) ;
+      return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName);
     }
   }
 
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/initilializers/DefaultReadersInitializer.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/initilializers/DefaultReadersInitializer.java
index 52f394f41af..b38346935bd 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/initilializers/DefaultReadersInitializer.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/readers/initilializers/DefaultReadersInitializer.java
@@ -46,7 +46,7 @@ public DefaultReadersInitializer(FragmentContext context, HiveSubScan config, Cl
 
     List<RecordReader> readers = new LinkedList<>();
     Constructor<? extends HiveAbstractReader> readerConstructor = createReaderConstructor();
-    for (int i = 0 ; i < inputSplits.size(); i++) {
+    for (int i = 0; i < inputSplits.size(); i++) {
       readers.add(createReader(readerConstructor, hasPartitions ? partitions.get(i) : null, inputSplits.get(i)));
     }
     return readers;
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
index 7808352c6b3..932b144b152 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
@@ -84,7 +84,8 @@ public void longInOut() throws Exception{
   @Test
   public void floatInOut() throws Exception{
     String query = "SELECT testHiveUDFFloat(float_field) as col1 FROM hive.readtest";
-    String expected = "col1\n" + "4.67\n" + "null\n";    helper(query, expected);
+    String expected = "col1\n" + "4.67\n" + "null\n";
+    helper(query, expected);
   }
 
   @Test
@@ -105,7 +106,8 @@ public void stringInOut() throws Exception{
   public void binaryInOut() throws Exception{
     String query = "SELECT testHiveUDFBinary(binary_field) as col1 FROM hive.readtest";
     String expected = "col1\n" + "binaryfield\n" + "null\n";
-    helper(query, expected);    helper(query, expected);
+    helper(query, expected);
+    helper(query, expected);
   }
 
   @Test
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcExpressionCheck.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcExpressionCheck.java
index e0b01b3889b..4637abb4eae 100644
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcExpressionCheck.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcExpressionCheck.java
@@ -80,7 +80,6 @@ public Boolean visitOver(RexOver over) {
     if (!visitCall(over)) {
       return false;
     }
-    ;
 
     final RexWindow window = over.getWindow();
     for (RexFieldCollation orderKey : window.orderKeys) {
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java
index c38ea3b97b9..efb5a5c7efc 100755
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java
@@ -305,7 +305,8 @@ public Table getTable(String name) {
     public JdbcCatalogSchema(String name) {
       super(ImmutableList.<String> of(), name);
 
-      try (Connection con = source.getConnection(); ResultSet set = con.getMetaData().getCatalogs()) {
+      try (Connection con = source.getConnection();
+           ResultSet set = con.getMetaData().getCatalogs()) {
         while (set.next()) {
           final String catalogName = set.getString(1);
           CapitalizingJdbcSchema schema = new CapitalizingJdbcSchema(
@@ -347,7 +348,8 @@ void setHolder(SchemaPlus plusOfThis) {
 
     private boolean addSchemas() {
       boolean added = false;
-      try (Connection con = source.getConnection(); ResultSet set = con.getMetaData().getSchemas()) {
+      try (Connection con = source.getConnection();
+           ResultSet set = con.getMetaData().getSchemas()) {
         while (set.next()) {
           final String schemaName = set.getString(1);
           final String catalogName = set.getString(2);
diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPartitionScanSpecBuilder.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPartitionScanSpecBuilder.java
index b52ed442080..202670e2efa 100644
--- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPartitionScanSpecBuilder.java
+++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPartitionScanSpecBuilder.java
@@ -41,7 +41,7 @@
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(KafkaPartitionScanSpecBuilder.class);
   private final LogicalExpression le;
   private final KafkaGroupScan groupScan;
-  private final KafkaConsumer<? ,?> kafkaConsumer;
+  private final KafkaConsumer<?, ?> kafkaConsumer;
   private ImmutableMap<TopicPartition, KafkaPartitionScanSpec> fullScanSpec;
   private static final long CLOSE_TIMEOUT_MS = 200;
 
diff --git a/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/BatchScheduler.java b/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/BatchScheduler.java
index 8f3aaabd92e..acb254a52c8 100644
--- a/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/BatchScheduler.java
+++ b/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/BatchScheduler.java
@@ -35,7 +35,10 @@ public void completed(Task task) {
   }
 
   @Override
-  public int resize(int level) { quantity = level; return quantity; }
+  public int resize(int level) {
+    quantity = level;
+    return quantity;
+  }
 
   @Override
   public int getTarget() { return quantity; }
diff --git a/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/http/AMSecurityManagerImpl.java b/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/http/AMSecurityManagerImpl.java
index d31690e1a8a..2dbba651fd4 100644
--- a/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/http/AMSecurityManagerImpl.java
+++ b/drill-yarn/src/main/java/org/apache/drill/yarn/appMaster/http/AMSecurityManagerImpl.java
@@ -182,7 +182,6 @@ public void init() {
       managerImpl.init();
     } else if (DoYUtil.isBlank(authType)
         || DrillOnYarnConfig.AUTH_TYPE_NONE.equals(authType)) {
-      ;
     } else {
       LOG.error("Unrecognized authorization type for "
           + DrillOnYarnConfig.HTTP_AUTH_TYPE + ": " + authType
diff --git a/drill-yarn/src/main/java/org/apache/drill/yarn/client/CleanCommand.java b/drill-yarn/src/main/java/org/apache/drill/yarn/client/CleanCommand.java
index 1fcba2d4798..5a8a8b107de 100644
--- a/drill-yarn/src/main/java/org/apache/drill/yarn/client/CleanCommand.java
+++ b/drill-yarn/src/main/java/org/apache/drill/yarn/client/CleanCommand.java
@@ -70,7 +70,6 @@ private void removeArchive(String archiveName) {
     try {
       dfs.removeDrillFile(archiveName);
       System.out.println(" Removed");
-      ;
     } catch (DfsFacadeException e) {
       System.out.println();
       System.err.println(e.getMessage());
diff --git a/drill-yarn/src/test/java/org/apache/drill/yarn/core/TestConfig.java b/drill-yarn/src/test/java/org/apache/drill/yarn/core/TestConfig.java
index 99cb2c62ba1..b2ec78626d5 100644
--- a/drill-yarn/src/test/java/org/apache/drill/yarn/core/TestConfig.java
+++ b/drill-yarn/src/test/java/org/apache/drill/yarn/core/TestConfig.java
@@ -81,7 +81,7 @@ protected URL findResource(String name) {
         try {
           return file.toURI().toURL();
         } catch (MalformedURLException e) {
-          ;
+          // noop
         }
       }
       return null;
diff --git a/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java b/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java
index 8a909a5d21f..f9d2ee313a4 100644
--- a/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java
+++ b/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java
@@ -809,7 +809,7 @@ private void captureOutput(RunResult result) throws IOException {
         }
         result.analyze();
       } catch (FileNotFoundException e) {
-        ;
+        // noop
       }
     }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index f5556cf9b63..9bec3933a07 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -49,7 +49,7 @@ private ExecConstants() {
   public static final String ZK_REFRESH = "drill.exec.zk.refresh";
   public static final String BIT_RETRY_TIMES = "drill.exec.rpc.bit.server.retry.count";
   public static final String BIT_RETRY_DELAY = "drill.exec.rpc.bit.server.retry.delay";
-  public static final String BIT_TIMEOUT = "drill.exec.bit.timeout" ;
+  public static final String BIT_TIMEOUT = "drill.exec.bit.timeout";
   public static final String SERVICE_NAME = "drill.exec.cluster-id";
   public static final String INITIAL_BIT_PORT = "drill.exec.rpc.bit.server.port";
   public static final String INITIAL_DATA_PORT = "drill.exec.rpc.bit.server.dataport";
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/SerializationDefinition.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/SerializationDefinition.java
index 84c37ba47c8..8fcb9e921a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/SerializationDefinition.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/SerializationDefinition.java
@@ -29,8 +29,7 @@
   STORAGE_PLUGINS(3003, StoragePlugins.class),
   FRAGMENT_STATUS(3004, FragmentStatus.class),
   FRAGMENT_HANDLE(3005, FragmentHandle.class),
-  PLAN_FRAGMENT(3006, PlanFragment.class)
-  ;
+  PLAN_FRAGMENT(3006, PlanFragment.class);
 
   public final int id;
   public final Class<?> clazz;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
index ccd7e36361b..bca59dc2eea 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
@@ -824,7 +824,7 @@ public void runQuery(QueryType type, String plan, UserResultsListener resultsLis
   private class ListHoldingResultsListener implements UserResultsListener {
     private final Vector<QueryDataBatch> results = new Vector<>();
     private final SettableFuture<List<QueryDataBatch>> future = SettableFuture.create();
-    private final UserProtos.RunQuery query ;
+    private final UserProtos.RunQuery query;
 
     public ListHoldingResultsListener(UserProtos.RunQuery query) {
       logger.debug( "Listener created for query \"\"\"{}\"\"\"", query );
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
index ff0d0b24756..7458c22dd5a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DumpCat.java
@@ -180,7 +180,7 @@ protected void doQuery(FileInputStream input) throws Exception{
       aggBatchMetaInfo.add(getBatchMetaInfo(vcSerializable));
 
       if (vectorContainer.getRecordCount() == 0) {
-        emptyBatchNum ++;
+        emptyBatchNum++;
       }
 
       if (prevSchema != null && !vectorContainer.getSchema().equals(prevSchema)) {
@@ -188,7 +188,7 @@ protected void doQuery(FileInputStream input) throws Exception{
       }
 
       prevSchema = vectorContainer.getSchema();
-      batchNum ++;
+      batchNum++;
 
       vectorContainer.zeroVectors();
     }
@@ -219,7 +219,7 @@ protected void doBatch(FileInputStream input, int targetBatchNum, boolean showHe
 
     VectorAccessibleSerializable vcSerializable = null;
 
-    while (input.available() > 0 && batchNum ++ < targetBatchNum) {
+    while (input.available() > 0 && batchNum++ < targetBatchNum) {
       vcSerializable = new VectorAccessibleSerializable(DumpCat.allocator);
       vcSerializable.readFromStream(input);
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java
index ebb31f492c6..f15c01a02fd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/CodeGeneratorMethod.java
@@ -58,7 +58,7 @@ public CodeGeneratorMethod(Method m) {
       throw new RuntimeException(String.format("Unexpected number of parameter names %s.  Expected %s on method %s.", Arrays.toString(parameterNames), Arrays.toString(types), m.toGenericString()));
     }
     arguments = new CodeGeneratorArgument[parameterNames.length];
-    for (int i = 0 ; i < parameterNames.length; i++) {
+    for (int i = 0; i < parameterNames.length; i++) {
       arguments[i] = new CodeGeneratorArgument(parameterNames[i], types[i]);
     }
     exs = m.getExceptionTypes();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/DotDrillType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/DotDrillType.java
index e94d9f85983..a8b5f4b4c4f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/DotDrillType.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/dotdrill/DotDrillType.java
@@ -21,10 +21,10 @@
 import org.apache.hadoop.fs.Path;
 
 public enum DotDrillType {
-  VIEW
+  VIEW;
   // ,FORMAT
   // ,STATS
-  ;
+
 
   private final String ending;
 
@@ -70,4 +70,4 @@ public String getEnding() {
     b.append("}.drill");
     DOT_DRILL_GLOB = b.toString();
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
index 4486972ee12..373cdf69a37 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
@@ -778,7 +778,7 @@ private HoldingContainer visitBooleanAnd(BooleanOperator op,
         setBlock.assign(out.getValue(), JExpr.lit(1));
       } else {
         assert (e == null);
-        eval.assign(out.getValue(), JExpr.lit(1)) ;
+        eval.assign(out.getValue(), JExpr.lit(1));
       }
 
       generator.unNestEvalBlock();     // exit from nested block
@@ -841,7 +841,7 @@ private HoldingContainer visitBooleanOr(BooleanOperator op,
         setBlock.assign(out.getValue(), JExpr.lit(0));
       } else {
         assert (e == null);
-        eval.assign(out.getValue(), JExpr.lit(0)) ;
+        eval.assign(out.getValue(), JExpr.lit(0));
       }
 
       generator.unNestEvalBlock();   // exit from nested block.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/SizedJBlock.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/SizedJBlock.java
index fdc469724d0..6263dec5268 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/SizedJBlock.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/SizedJBlock.java
@@ -42,7 +42,7 @@ public JBlock getBlock() {
   }
 
   public void incCounter() {
-    this.count ++;
+    this.count++;
   }
 
   public int getCount() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillAggFuncHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillAggFuncHolder.java
index 1a5df670f6c..59638922af4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillAggFuncHolder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/DrillAggFuncHolder.java
@@ -93,13 +93,13 @@ public boolean isAggregating() {
         //Loop through all workspace vectors, to get the minimum of size of all workspace vectors.
         JVar sizeVar = setupBlock.decl(g.getModel().INT, "vectorSize", JExpr.lit(Integer.MAX_VALUE));
         JClass mathClass = g.getModel().ref(Math.class);
-        for (int id = 0; id < getWorkspaceVars().length; id ++) {
+        for (int id = 0; id < getWorkspaceVars().length; id++) {
           if (!getWorkspaceVars()[id].isInject()) {
             setupBlock.assign(sizeVar,mathClass.staticInvoke("min").arg(sizeVar).arg(g.getWorkspaceVectors().get(getWorkspaceVars()[id]).invoke("getValueCapacity")));
           }
         }
 
-        for(int i =0 ; i < getWorkspaceVars().length; i++) {
+        for (int i = 0; i < getWorkspaceVars().length; i++) {
           if (!getWorkspaceVars()[i].isInject()) {
             setupBlock.assign(workspaceJVars[i], JExpr._new(g.getHolderType(getWorkspaceVars()[i].getMajorType())));
           }
@@ -156,7 +156,7 @@ public HoldingContainer renderEnd(ClassGenerator<?> classGenerator, HoldingConta
   private JVar[] declareWorkspaceVectors(ClassGenerator<?> g) {
     JVar[] workspaceJVars = new JVar[getWorkspaceVars().length];
 
-    for(int i =0 ; i < getWorkspaceVars().length; i++){
+    for (int i = 0; i < getWorkspaceVars().length; i++) {
       if (getWorkspaceVars()[i].isInject()) {
         workspaceJVars[i] = g.declareClassField("work", g.getModel()._ref(getWorkspaceVars()[i].getType()));
         g.getBlock(BlockType.SETUP).assign(workspaceJVars[i], g.getMappingSet().getIncoming().invoke("getContext").invoke("getManagedBuffer"));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Hash32Functions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Hash32Functions.java
index 412674f1381..4789a48d40a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Hash32Functions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/Hash32Functions.java
@@ -346,7 +346,7 @@ public void eval() {
     }
   }
 
-  @FunctionTemplate(names = {"hash", "hash32" ,"hash32AsDouble"}, scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+  @FunctionTemplate(names = {"hash", "hash32", "hash32AsDouble"}, scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
   public static class NullableTimeHash implements DrillSimpleFunc {
     @Param  NullableTimeHolder in;
     @Output IntHolder out;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
index 43098486cc7..c23a1a29846 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
@@ -498,7 +498,7 @@ public void eval() {
 
       for (int id = input.start; id < input.end; id++) {
         byte  currentByte = input.buffer.getByte(id);
-        out.buffer.setByte(id - input.start, Character.toLowerCase(currentByte)) ;
+        out.buffer.setByte(id - input.start, Character.toLowerCase(currentByte));
       }
     }
   }
@@ -529,7 +529,7 @@ public void eval() {
 
       for (int id = input.start; id < input.end; id++) {
         byte currentByte = input.buffer.getByte(id);
-        out.buffer.setByte(id - input.start, Character.toUpperCase(currentByte)) ;
+        out.buffer.setByte(id - input.start, Character.toUpperCase(currentByte));
       }
     }
   }
@@ -711,9 +711,9 @@ public void eval() {
         final int charCount = org.apache.drill.exec.expr.fn.impl.StringFunctionUtil.getUTF8CharLength(string.buffer, string.start, string.end);
         final int charLen;
         if (length.value > 0) {
-          charLen = Math.min((int)length.value, charCount);  //left('abc', 5) -> 'abc'
+          charLen = Math.min((int) length.value, charCount);  //left('abc', 5) -> 'abc'
         } else if (length.value < 0) {
-          charLen = Math.max(0, charCount + (int)length.value) ; // left('abc', -5) ==> ''
+          charLen = Math.max(0, charCount + (int) length.value); // left('abc', -5) ==> ''
         } else {
           charLen = 0;
         }
@@ -830,7 +830,7 @@ public void eval() {
 
           if (j == from.end ) {
             //find a true match ("from" is not empty), copy entire "to" string to out buffer
-            for (int k = to.start ; k < to.end; k++) {
+            for (int k = to.start; k < to.end; k++) {
               out.buffer.setByte(out.end++, to.buffer.getByte(k));
             }
 
@@ -921,7 +921,7 @@ public void eval() {
                 (currentByte & 0xE0) == 0xC0 ||   // 2-byte char. First byte is 110xxxxx
                 (currentByte & 0xF0) == 0xE0 ||   // 3-byte char. First byte is 1110xxxx
                 (currentByte & 0xF8) == 0xF0) {   //4-byte char. First byte is 11110xxx
-              count ++;  //Advance the counter, since we find one char.
+              count++;  //Advance the counter, since we find one char.
             }
             out.buffer.setByte(out.end++, currentByte);
           }
@@ -1070,7 +1070,7 @@ public void eval() {
                 (currentByte & 0xE0) == 0xC0 ||   // 2-byte char. First byte is 110xxxxx
                 (currentByte & 0xF0) == 0xE0 ||   // 3-byte char. First byte is 1110xxxx
                 (currentByte & 0xF8) == 0xF0) {   //4-byte char. First byte is 11110xxx
-              count ++;  //Advance the counter, since we find one char.
+              count++;  //Advance the counter, since we find one char.
             }
             out.buffer.setByte(out.end++, currentByte);
           }
@@ -1429,7 +1429,7 @@ public void setup() {
 
     @Override
     public void eval() {
-      out.buffer = buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start));
+      out.buffer = buffer = buffer.reallocIfNeeded((left.end - left.start) + (right.end - right.start));
       out.start = out.end = 0;
 
       int id = 0;
@@ -1493,7 +1493,7 @@ public void setup() {
 
     @Override
     public void eval() {
-      out.buffer = buffer = buffer.reallocIfNeeded( (left.end - left.start) + (right.end - right.start));;
+      out.buffer = buffer = buffer.reallocIfNeeded((left.end - left.start) + (right.end - right.start));
       out.start = out.end = 0;
 
       int id = 0;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertTo.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertTo.java
index e384892365a..1bb2512dec6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertTo.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertTo.java
@@ -44,7 +44,8 @@
 
   private JsonConvertTo(){}
 
-  @FunctionTemplate(names = { "convert_toJSON", "convert_toSIMPLEJSON" } , scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL,
+  @FunctionTemplate(names = { "convert_toJSON", "convert_toSIMPLEJSON" },
+                    scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL,
                     outputWidthCalculatorType = FunctionTemplate.OutputWidthCalculatorType.CUSTOM_FIXED_WIDTH_DEFUALT)
   public static class ConvertToJson implements DrillSimpleFunc{
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java
index e127391fe71..c07e6ed37d8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java
@@ -316,7 +316,7 @@ private void addFunctions(Map<String, Queue<String>> jar, List<FunctionHolder> n
       final String functionName = function.getName();
       Queue<String> jarFunctions = jar.get(functionName);
       if (jarFunctions == null) {
-        jarFunctions = Queues.newConcurrentLinkedQueue();;
+        jarFunctions = Queues.newConcurrentLinkedQueue();
         jar.put(functionName, jarFunctions);
       }
       final String functionSignature = function.getSignature();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashAggregate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashAggregate.java
index 51f34a017e5..da988dec4f8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashAggregate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashAggregate.java
@@ -104,6 +104,6 @@ public void setMaxAllocation(long maxAllocation) {
   public boolean isBufferedOperator(QueryContext queryContext) {
     // In case forced to use a single partition - do not consider this a buffered op (when memory is divided)
     return queryContext == null ||
-      1 < (int)queryContext.getOptions().getOption(ExecConstants.HASHAGG_NUM_PARTITIONS_VALIDATOR) ;
+      1 < (int) queryContext.getOptions().getOption(ExecConstants.HASHAGG_NUM_PARTITIONS_VALIDATOR);
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashJoinPOP.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashJoinPOP.java
index 48d977ea151..146bb7fd6e7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashJoinPOP.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/HashJoinPOP.java
@@ -88,6 +88,6 @@ public void setMaxAllocation(long maxAllocation) {
   public boolean isBufferedOperator(QueryContext queryContext) {
     // In case forced to use a single partition - do not consider this a buffered op (when memory is divided)
     return queryContext == null ||
-      1 < (int)queryContext.getOptions().getOption(ExecConstants.HASHJOIN_NUM_PARTITIONS_VALIDATOR) ;
+      1 < (int) queryContext.getOptions().getOption(ExecConstants.HASHJOIN_NUM_PARTITIONS_VALIDATOR);
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
index 4bbfa05a16e..1954c79a7e4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
@@ -154,7 +154,13 @@
   private int cycleNum = 0; // primary, secondary, tertiary, etc.
   private int originalPartition = -1; // the partition a secondary reads from
 
-  private static class SpilledPartition { public int spilledBatches; public String spillFile; int cycleNum; int origPartn; int prevOrigPartn; }
+  private static class SpilledPartition {
+    public int spilledBatches;
+    public String spillFile;
+    int cycleNum;
+    int origPartn;
+    int prevOrigPartn;
+  }
 
   private ArrayList<SpilledPartition> spilledPartitionsList;
   private int operatorId; // for the spill file name
@@ -193,7 +199,6 @@
     AVG_OUTPUT_BATCH_BYTES,
     AVG_OUTPUT_ROW_BYTES,
     OUTPUT_RECORD_COUNT;
-    ;
 
     @Override
     public int metricId() {
@@ -333,7 +338,7 @@ public void setup(HashAggregate hashAggrConfig, HashTableConfig htConfig, Fragme
 
     is2ndPhase = hashAggrConfig.getAggPhase() == AggPrelBase.OperatorPhase.PHASE_2of2;
     isTwoPhase = hashAggrConfig.getAggPhase() != AggPrelBase.OperatorPhase.PHASE_1of1;
-    is1stPhase = isTwoPhase && ! is2ndPhase ;
+    is1stPhase = isTwoPhase && !is2ndPhase;
     canSpill = isTwoPhase; // single phase can not spill
 
     // Typically for testing - force a spill after a partition has more than so many batches
@@ -410,8 +415,8 @@ private void delayedSetup() {
       updateEstMaxBatchSize(incoming);
     }
     // create "reserved memory" and adjust the memory limit down
-    reserveValueBatchMemory = reserveOutgoingMemory = estValuesBatchSize ;
-    long newMemoryLimit = allocator.getLimit() - reserveValueBatchMemory - reserveOutgoingMemory ;
+    reserveValueBatchMemory = reserveOutgoingMemory = estValuesBatchSize;
+    long newMemoryLimit = allocator.getLimit() - reserveValueBatchMemory - reserveOutgoingMemory;
     long memAvail = newMemoryLimit - allocator.getAllocatedMemory();
     if ( memAvail <= 0 ) { throw new OutOfMemoryException("Too little memory available"); }
     allocator.setLimit(newMemoryLimit);
@@ -458,9 +463,9 @@ private void delayedSetup() {
     bitsInMask = Integer.bitCount(partitionMask); // e.g. 0x1F -> 5
 
     // Create arrays (one entry per partition)
-    htables = new HashTable[numPartitions] ;
-    batchHolders = (ArrayList<BatchHolder>[]) new ArrayList<?>[numPartitions] ;
-    outBatchIndex = new int[numPartitions] ;
+    htables = new HashTable[numPartitions];
+    batchHolders = (ArrayList<BatchHolder>[]) new ArrayList<?>[numPartitions];
+    outBatchIndex = new int[numPartitions];
     writers = new Writer[numPartitions];
     spilledBatchesCount = new int[numPartitions];
     spillFiles = new String[numPartitions];
@@ -486,7 +491,11 @@ private void delayedSetup() {
       this.batchHolders[i] = new ArrayList<BatchHolder>(); // First BatchHolder is created when the first put request is received.
     }
     // Initialize the value vectors in the generated code (which point to the incoming or outgoing fields)
-    try { htables[0].updateBatches(); } catch (SchemaChangeException sc) { throw new UnsupportedOperationException(sc); };
+    try {
+      htables[0].updateBatches();
+    } catch (SchemaChangeException sc) {
+      throw new UnsupportedOperationException(sc);
+    }
   }
   /**
    * get new incoming: (when reading spilled files like an "incoming")
@@ -689,7 +698,7 @@ public AggOutcome doWork() {
 
           // Either flag buildComplete or handleEmit (or earlyOutput) would cause returning of
           // the outgoing batch downstream (see innerNext() in HashAggBatch).
-          buildComplete = true ; // now should go and return outgoing
+          buildComplete = true; // now should go and return outgoing
 
           if ( handleEmit ) {
             buildComplete = false; // This was not a real NONE - more incoming is expected
@@ -939,7 +948,7 @@ private int chooseAPartitionToFlush(int currPart, boolean tryAvoidCurr) {
     }
     // Give the current (if already spilled) some priority
     if ( ! tryAvoidCurr && isSpilled(currPart) && ( currPartSize + 1 >= maxSizeSpilled )) {
-      maxSizeSpilled = currPartSize ;
+      maxSizeSpilled = currPartSize;
       indexMaxSpilled = currPart;
     }
     // now find the largest non-spilled partition
@@ -948,7 +957,7 @@ private int chooseAPartitionToFlush(int currPart, boolean tryAvoidCurr) {
     // Use the largest spilled (if found) as a base line, with a factor of 4
     if ( indexMaxSpilled > -1 && maxSizeSpilled > 1 ) {
       indexMax = indexMaxSpilled;
-      maxSize = 4 * maxSizeSpilled ;
+      maxSize = 4 * maxSizeSpilled;
     }
     for ( int insp = 0; insp < numPartitions; insp++) {
       if ( ! isSpilled(insp) && maxSize < batchHolders[insp].size() ) {
@@ -1159,7 +1168,11 @@ public AggIterOutcome outputCurrentBatch() {
         originalPartition = sp.origPartn; // used for the filename
         logger.trace("Reading back spilled original partition {} as an incoming",originalPartition);
         // Initialize .... new incoming, new set of partitions
-        try { initializeSetup(newIncoming); } catch (Exception e) { throw new RuntimeException(e); }
+        try {
+          initializeSetup(newIncoming);
+        } catch (Exception e) {
+          throw new RuntimeException(e);
+        }
         // update the cycle num if needed
         // The current cycle num should always be one larger than in the spilled partition
         if ( cycleNum == sp.cycleNum ) {
@@ -1179,7 +1192,7 @@ public AggIterOutcome outputCurrentBatch() {
         return AggIterOutcome.AGG_RESTART;
       }
 
-      partitionToReturn = nextPartitionToReturn ;
+      partitionToReturn = nextPartitionToReturn;
 
     }
 
@@ -1187,7 +1200,7 @@ public AggIterOutcome outputCurrentBatch() {
     int numPendingOutput = currPartition.get(currOutBatchIndex).getNumPendingOutput();
 
     // The following accounting is for logging, metrics, etc.
-    rowsInPartition += numPendingOutput ;
+    rowsInPartition += numPendingOutput;
     if ( ! handlingSpills ) { rowsNotSpilled += numPendingOutput; }
     else { rowsSpilledReturned += numPendingOutput; }
     if ( earlyOutput ) { rowsReturnedEarly += numPendingOutput; }
@@ -1238,7 +1251,7 @@ public AggIterOutcome outputCurrentBatch() {
           logger.debug("HASH AGG: Finished (early) re-init partition {}, mem allocated: {}", earlyPartition, allocator.getAllocatedMemory());
         }
         outBatchIndex[earlyPartition] = 0; // reset, for next time
-        earlyOutput = false ; // done with early output
+        earlyOutput = false; // done with early output
       }
       else if ( handleEmit ) {
         // When returning the last outgoing batch (following an incoming EMIT), then replace OK with EMIT
@@ -1290,9 +1303,9 @@ public int numGroupedRecords() {
    */
   private String getOOMErrorMsg(String prefix) {
     String errmsg;
-    if ( !isTwoPhase ) {
-      errmsg = "Single Phase Hash Aggregate operator can not spill." ;
-    } else if ( ! canSpill ) {  // 2nd phase, with only 1 partition
+    if (!isTwoPhase) {
+      errmsg = "Single Phase Hash Aggregate operator can not spill.";
+    } else if (!canSpill) {  // 2nd phase, with only 1 partition
       errmsg = "Too little memory available to operator to facilitate spilling.";
     } else { // a bug ?
       errmsg = prefix + " OOM at " + (is2ndPhase ? "Second Phase" : "First Phase") + ". Partitions: " + numPartitions +
@@ -1353,9 +1366,11 @@ private void checkGroupAndAggrValues(int incomingRowIdx) {
     }
 
     // right shift hash code for secondary (or tertiary...) spilling
-    for (int i = 0; i < cycleNum; i++) { hashCode >>>= bitsInMask; }
+    for (int i = 0; i < cycleNum; i++) {
+      hashCode >>>= bitsInMask;
+    }
 
-    int currentPartition = hashCode & partitionMask ;
+    int currentPartition = hashCode & partitionMask;
     hashCode >>>= bitsInMask;
     HashTable.PutStatus putStatus = null;
     long allocatedBeforeHTput = allocator.getAllocatedMemory();
@@ -1398,7 +1413,7 @@ private void checkGroupAndAggrValues(int incomingRowIdx) {
         throw new UnsupportedOperationException("Unexpected schema change", e);
     }
     long allocatedBeforeAggCol = allocator.getAllocatedMemory();
-    boolean needToCheckIfSpillIsNeeded = allocatedBeforeAggCol > allocatedBeforeHTput ;
+    boolean needToCheckIfSpillIsNeeded = allocatedBeforeAggCol > allocatedBeforeHTput;
 
     // Add an Aggr batch if needed:
     //
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
index f58be89291d..4c54650cf32 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggregator.java
@@ -44,7 +44,7 @@
 
   // For returning results from outputCurrentBatch
   // OK - batch returned, NONE - end of data, RESTART - call again, EMIT - like OK but EMIT
-  enum AggIterOutcome { AGG_OK, AGG_NONE, AGG_RESTART , AGG_EMIT }
+  enum AggIterOutcome { AGG_OK, AGG_NONE, AGG_RESTART, AGG_EMIT }
 
   void setup(HashAggregate hashAggrConfig, HashTableConfig htConfig, FragmentContext context, OperatorContext oContext, RecordBatch incoming, HashAggBatch outgoing,
              LogicalExpression[] valueExprs, List<TypedFieldId> valueFieldIds, TypedFieldId[] keyFieldIds, VectorContainer outContainer, int extraRowBytes) throws SchemaChangeException, IOException, ClassTransformationException;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/SpilledRecordbatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/SpilledRecordbatch.java
index c78e2c01f21..7ebce2b4ad5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/SpilledRecordbatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/SpilledRecordbatch.java
@@ -135,7 +135,7 @@ public IterOutcome next() {
 
     if ( spillStream == null ) {
       throw new IllegalStateException("Spill stream was null");
-    };
+    }
 
     if ( spillSet.getPosition(spillStream)  < 0 ) {
       HashAggTemplate.logger.warn("Position is {} for stream {}", spillSet.getPosition(spillStream), spillStream.toString());
@@ -155,7 +155,7 @@ public IterOutcome next() {
       throw UserException.dataReadError(e).addContext("Failed reading from a spill file").build(HashAggTemplate.logger);
     }
 
-    spilledBatches-- ; // one less batch to read
+    spilledBatches--; // one less batch to read
     return IterOutcome.OK;
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java
index 2a64b930795..23fdcc1d24c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggregator.java
@@ -49,8 +49,7 @@
     RETURN_OUTCOME,
     CLEANUP_AND_RETURN,
     UPDATE_AGGREGATOR,
-    RETURN_AND_RESET
-    ;
+    RETURN_AND_RESET;
   }
 
   public abstract void setup(OperatorContext context, RecordBatch incoming, StreamingAggBatch outgoing) throws SchemaChangeException;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableStats.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableStats.java
index 8f0b7abcb47..8c93f1bb6e8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableStats.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableStats.java
@@ -28,10 +28,10 @@ public HashTableStats() {
   }
 
   public void addStats (HashTableStats newStats) {
-    this.numBuckets += newStats.numBuckets ;
-    this.numEntries += newStats.numEntries ;
-    this.numResizing += newStats.numResizing ;
-    this.resizingTime += newStats.resizingTime ;
+    this.numBuckets += newStats.numBuckets;
+    this.numEntries += newStats.numEntries;
+    this.numResizing += newStats.numResizing;
+    this.resizingTime += newStats.resizingTime;
   }
 }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
index 83b72d7c70c..3c418b99c5d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
@@ -476,7 +476,7 @@ public void setup(HashTableConfig htConfig, BufferAllocator allocator, VectorCon
     if (tableSize > MAXIMUM_CAPACITY) {
       tableSize = MAXIMUM_CAPACITY;
     }
-    originalTableSize = tableSize ; // retain original size
+    originalTableSize = tableSize; // retain original size
 
     threshold = (int) Math.ceil(tableSize * loadf);
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenTemplate.java
index cd58bfd5d5d..365451c02cd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenTemplate.java
@@ -113,7 +113,7 @@ public final int flattenRecords(final int recordCount, final int firstOutputInde
               } catch (OversizedAllocationException ex) {
                 // unable to flatten due to a soft buffer overflow. split the batch here and resume execution.
                 logger.debug("Reached allocation limit. Splitting the batch at input index: {} - inner index: {} - current completed index: {}",
-                    valueIndexLocal, innerValueIndexLocal, currentInnerValueIndexLocal) ;
+                    valueIndexLocal, innerValueIndexLocal, currentInnerValueIndexLocal);
 
                 /*
                  * TODO
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
index d4d4f927e3f..b1ea96f0553 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
@@ -776,7 +776,7 @@ public void executeBuildPhase() throws SchemaChangeException {
         for (int ind = 0; ind < currentRecordCount; ind++) {
           int hashCode = ( cycleNum == 0 ) ? partitions[0].getBuildHashCode(ind)
             : read_right_HV_vector.getAccessor().get(ind); // get the hash value from the HV column
-          int currPart = hashCode & partitionMask ;
+          int currPart = hashCode & partitionMask;
           hashCode >>>= bitsInMask;
           // Append the new inner row to the appropriate partition; spill (that partition) if needed
           partitions[currPart].appendInnerRow(buildBatch.getContainer(), ind, hashCode, buildCalc); // may spill if needed
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
index 46f2fa3690d..639f757eccf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
@@ -162,7 +162,7 @@ public void setupHashJoinProbe(RecordBatch probeBatch, HashJoinBatch outgoing, J
    */
   private int appendBuild(VectorContainer buildSrcContainer, int buildSrcIndex) {
     // "- 1" to skip the last "hash values" added column
-    int lastColIndex = buildSrcContainer.getNumberOfColumns() - 1 ;
+    int lastColIndex = buildSrcContainer.getNumberOfColumns() - 1;
     for (int vectorIndex = 0; vectorIndex < lastColIndex; vectorIndex++) {
       ValueVector destVector = container.getValueVector(vectorIndex).getValueVector();
       ValueVector srcVector = buildSrcContainer.getValueVector(vectorIndex).getValueVector();
@@ -292,7 +292,7 @@ private void executeProbePhase() throws SchemaChangeException {
           int hashCode = ( cycleNum == 0 ) ?
             partitions[0].getProbeHashCode(recordsProcessed)
             : read_left_HV_vector.getAccessor().get(recordsProcessed);
-          int currBuildPart = hashCode & partitionMask ;
+          int currBuildPart = hashCode & partitionMask;
           hashCode >>>= bitsInMask;
 
           // Set and keep the current partition (may be used again on subsequent probe calls as
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
index b9745371eeb..52871e2b82b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
@@ -235,7 +235,7 @@ public static boolean isScalarSubquery(RelNode root) {
       if (currentrel instanceof DrillAggregateRel) {
         agg = (DrillAggregateRel)currentrel;
       } else if (currentrel instanceof RelSubset) {
-        currentrel = ((RelSubset) currentrel).getBest() ;
+        currentrel = ((RelSubset) currentrel).getBest();
       } else if (currentrel instanceof DrillLimitRel) {
         // TODO: Improve this check when DRILL-5691 is fixed.
         // The problem is that RelMdMaxRowCount currently cannot be used
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/LateralJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/LateralJoinBatch.java
index 18843b5b4b0..b3780973aaa 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/LateralJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/LateralJoinBatch.java
@@ -624,8 +624,8 @@ private IterOutcome produceOutputBatch() {
           if (leftUpstream == EMIT || leftUpstream == OK_NEW_SCHEMA) {
             break;
           } else {
-            logger.debug("Output batch still has some space left, getting new batches from left and right. OutIndex: {}"
-              , outputIndex);
+            logger.debug("Output batch still has some space left, getting new batches from left and right. OutIndex: {}",
+              outputIndex);
             // Get both left batch and the right batch and make sure indexes are properly set
             leftUpstream = processLeftBatch();
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
index 034d6c23d48..5049b067eaf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
@@ -269,7 +269,7 @@ protected void createPartitioner() throws SchemaChangeException {
     // set up partitioning function
     final LogicalExpression expr = operator.getExpr();
     final ErrorCollector collector = new ErrorCollectorImpl();
-    final ClassGenerator<Partitioner> cg ;
+    final ClassGenerator<Partitioner> cg;
 
     cg = CodeGenerator.getRoot(Partitioner.TEMPLATE_DEFINITION, context.getOptions());
     cg.getCodeGenerator().plainJavaCapable(true);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unnest/UnnestImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unnest/UnnestImpl.java
index 02d2f183393..f0816b62559 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unnest/UnnestImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/unnest/UnnestImpl.java
@@ -54,7 +54,7 @@
   private RepeatedValueVector.RepeatedAccessor accessor;
   private RecordBatch outgoing;
 
-  private IntVector rowIdVector ; // Allocated and owned by the UnnestRecordBatch
+  private IntVector rowIdVector; // Allocated and owned by the UnnestRecordBatch
   private IntVector.Mutator rowIdVectorMutator;
 
   /**
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMemoryManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMemoryManager.java
index 68b546b95f7..5d2721a812f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMemoryManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMemoryManager.java
@@ -266,7 +266,7 @@ public SortMemoryManager(SortConfig config, long opMemoryLimit) {
     memoryLimit = (configMemoryLimit == 0) ? opMemoryLimit
                 : Math.min(opMemoryLimit, configMemoryLimit);
 
-    preferredSpillBatchSize = config.spillBatchSize();;
+    preferredSpillBatchSize = config.spillBatchSize();
     preferredMergeBatchSize = config.mergeBatchSize();
 
     // Initialize the buffer memory limit for the first batch.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnState.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnState.java
index acc9556f84f..24e270a4f7d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnState.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnState.java
@@ -347,8 +347,7 @@ public void dump(HierarchicalFormatter format) {
       .attribute("addVersion", addVersion)
       .attribute("state", state)
       .attributeIdentity("writer", writer)
-      .attribute("vectorState")
-      ;
+      .attribute("vectorState");
     vectorState.dump(format);
     format.endObject();
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java
index c7c6fdc5d33..2783c5b30d8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java
@@ -795,8 +795,7 @@ public void dump(HierarchicalFormatter format) {
       .attribute("activeSchemaVersion", activeSchemaVersion)
       .attribute("harvestSchemaVersion", harvestSchemaVersion)
       .attribute("pendingRowCount", pendingRowCount)
-      .attribute("targetRowCount", targetRowCount)
-      ;
+      .attribute("targetRowCount", targetRowCount);
     format.attribute("root");
     rootState.dump(format);
     format.attribute("rootWriter");
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/AbstractPartitionDescriptor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/AbstractPartitionDescriptor.java
index b70c764e2b4..cf012569268 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/AbstractPartitionDescriptor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/AbstractPartitionDescriptor.java
@@ -44,7 +44,7 @@
    * Create sublists of the partition locations, each sublist of size
    * at most {@link PartitionDescriptor#PARTITION_BATCH_SIZE}
    */
-  protected abstract void createPartitionSublists() ;
+  protected abstract void createPartitionSublists();
 
   /**
    * Iterator that traverses over the super list of partition locations and
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
index 216c8d27b64..4cd9ed3c56a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
@@ -65,7 +65,7 @@ public static boolean containsStarColumnInProject(RelDataType inputRowType, List
   }
 
   public static boolean isPrefixedStarColumn(String fieldName) {
-    return fieldName.indexOf(PREFIXED_STAR_COLUMN) > 0 ; // the delimiter * starts at none-zero position.
+    return fieldName.indexOf(PREFIXED_STAR_COLUMN) > 0; // the delimiter * starts at none-zero position.
   }
 
   public static boolean isNonPrefixedStarColumn(String fieldName) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java
index 10c4738bab2..862fb59fd57 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java
@@ -137,7 +137,7 @@ protected  RelOptCost computeCartesianJoinCost(RelOptPlanner planner, RelMetadat
                                     // just to make sure Cartesian Join is more expensive
                                     // than Non-Cartesian Join.
 
-    final int keySize = 1 ;  // assume having 1 join key, when estimate join cost.
+    final int keySize = 1;  // assume having 1 join key, when estimate join cost.
     final DrillCostBase cost = (DrillCostBase) computeHashJoinCostWithKeySize(planner, keySize, mq).multiplyBy(mulFactor);
 
     // Cartesian join row count will be product of two inputs. The other factors come from the above estimated DrillCost.
@@ -197,7 +197,7 @@ private RelOptCost computeHashJoinCostWithKeySize(RelOptPlanner planner, int key
         ) * buildRowCount * factor;
 
     double cpuCost = joinConditionCost * (probeRowCount) // probe size determine the join condition comparison cost
-        + cpuCostBuild + cpuCostProbe ;
+        + cpuCostBuild + cpuCostProbe;
 
     DrillCostFactory costFactory = (DrillCostFactory) planner.getCostFactory();
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillLimitRelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillLimitRelBase.java
index 7d070b6acab..fde3896c101 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillLimitRelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillLimitRelBase.java
@@ -84,7 +84,7 @@ public RelWriter explainTerms(RelWriter pw) {
 
   @Override
   public double estimateRowCount(RelMetadataQuery mq) {
-    int off = offset != null ? RexLiteral.intValue(offset) : 0 ;
+    int off = offset != null? RexLiteral.intValue(offset): 0;
 
     if (fetch == null) {
       // If estimated rowcount is less than offset return 0
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillProjectRelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillProjectRelBase.java
index b7881c2c957..6c1b1433074 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillProjectRelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillProjectRelBase.java
@@ -60,7 +60,7 @@
  * Base class for logical and physical Project implemented in Drill
  */
 public abstract class DrillProjectRelBase extends Project implements DrillRelNode {
-  private final int nonSimpleFieldCount ;
+  private final int nonSimpleFieldCount;
 
   protected DrillProjectRelBase(Convention convention, RelOptCluster cluster, RelTraitSet traits, RelNode child, List<? extends RexNode> exps,
       RelDataType rowType) {
@@ -128,10 +128,10 @@ private int getSimpleFieldCount() {
     for (RexNode expr : this.getProjects()) {
       if (expr instanceof RexInputRef) {
         // Simple Field reference.
-        cnt ++;
+        cnt++;
       } else if (expr instanceof RexCall && expr.accept(complexFieldIdentifer)) {
         // Complex field with named segments only.
-        cnt ++;
+        cnt++;
       }
     }
     return cnt;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java
index ba55faeb75e..f64cd7708c2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillCostBase.java
@@ -169,7 +169,7 @@ public boolean isInfinite() {
       || (this.io == Double.POSITIVE_INFINITY)
       || (this.network == Double.POSITIVE_INFINITY)
       || (this.rowCount == Double.POSITIVE_INFINITY)
-      || (this.memory == Double.POSITIVE_INFINITY) ;
+      || (this.memory == Double.POSITIVE_INFINITY);
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java
index c96f4e6eb96..7497783e383 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java
@@ -51,7 +51,7 @@ public static DrillFilterRel convert(org.apache.drill.common.logical.data.Filter
   }
 
   public static DrillFilterRel create(RelNode child, RexNode condition) {
-    return new DrillFilterRel(child.getCluster(), child.getTraitSet(), child, condition)  ;
+    return new DrillFilterRel(child.getCluster(), child.getTraitSet(), child, condition);
   }
 
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
index 2cd60688c53..5f2b02c292d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
@@ -301,10 +301,15 @@ private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call){
       case "CHAR":
         castType = Types.required(MinorType.VARCHAR).toBuilder().setPrecision(call.getType().getPrecision()).build();
         break;
-
-      case "INTEGER": castType = Types.required(MinorType.INT); break;
-      case "FLOAT": castType = Types.required(MinorType.FLOAT4); break;
-      case "DOUBLE": castType = Types.required(MinorType.FLOAT8); break;
+      case "INTEGER":
+        castType = Types.required(MinorType.INT);
+        break;
+      case "FLOAT":
+        castType = Types.required(MinorType.FLOAT4);
+        break;
+      case "DOUBLE":
+        castType = Types.required(MinorType.FLOAT8);
+        break;
       case "DECIMAL":
         if (!context.getPlannerSettings().getOptions().getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val) {
           throw UserException
@@ -327,7 +332,9 @@ private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call){
 
         case "INTERVAL_YEAR":
         case "INTERVAL_YEAR_MONTH":
-        case "INTERVAL_MONTH": castType = Types.required(MinorType.INTERVALYEAR); break;
+        case "INTERVAL_MONTH":
+          castType = Types.required(MinorType.INTERVALYEAR);
+          break;
         case "INTERVAL_DAY":
         case "INTERVAL_DAY_HOUR":
         case "INTERVAL_DAY_MINUTE":
@@ -337,11 +344,19 @@ private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call){
         case "INTERVAL_HOUR_SECOND":
         case "INTERVAL_MINUTE":
         case "INTERVAL_MINUTE_SECOND":
-        case "INTERVAL_SECOND": castType = Types.required(MinorType.INTERVALDAY); break;
-        case "BOOLEAN": castType = Types.required(MinorType.BIT); break;
-        case "BINARY": castType = Types.required(MinorType.VARBINARY); break;
-        case "ANY": return arg; // Type will be same as argument.
-        default: castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
+        case "INTERVAL_SECOND":
+          castType = Types.required(MinorType.INTERVALDAY);
+          break;
+        case "BOOLEAN":
+          castType = Types.required(MinorType.BIT);
+          break;
+        case "BINARY":
+          castType = Types.required(MinorType.VARBINARY);
+          break;
+        case "ANY":
+          return arg; // Type will be same as argument.
+        default:
+          castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
       }
       return FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionAllRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionAllRule.java
index 293749ba67d..69e9452d7cb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionAllRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionAllRule.java
@@ -61,7 +61,7 @@ public void onMatch(RelOptRuleCall call) {
       call.transformTo(new DrillUnionRel(union.getCluster(), traits, convertedInputs, union.all,
           true /* check compatibility */));
     } catch (InvalidRelException e) {
-      tracer.warn(e.toString()) ;
+      tracer.warn(e.toString());
     }
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionRel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionRel.java
index c94d2b5a9c2..a5a6e0330e1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionRel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillUnionRel.java
@@ -50,7 +50,7 @@ public DrillUnionRel copy(RelTraitSet traitSet, List<RelNode> inputs,
       return new DrillUnionRel(getCluster(), traitSet, inputs, all,
           false /* don't check compatibility during copy */);
     } catch (InvalidRelException e) {
-      throw new AssertionError(e) ;
+      throw new AssertionError(e);
     }
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java
index ca68a7d1a44..ff640a7b686 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/AggPrelBase.java
@@ -49,7 +49,7 @@
 
   public enum OperatorPhase {PHASE_1of1, PHASE_1of2, PHASE_2of2}
 
-  protected OperatorPhase operPhase = OperatorPhase.PHASE_1of1 ; // default phase
+  protected OperatorPhase operPhase = OperatorPhase.PHASE_1of1; // default phase
   protected List<NamedExpression> keys = Lists.newArrayList();
   protected List<NamedExpression> aggExprs = Lists.newArrayList();
   protected List<AggregateCall> phase2AggCallList = Lists.newArrayList();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/BroadcastExchangePrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/BroadcastExchangePrel.java
index 2c043689b60..caec426be82 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/BroadcastExchangePrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/BroadcastExchangePrel.java
@@ -56,7 +56,7 @@ public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
     final double inputRows = mq.getRowCount(child);
 
     final int  rowWidth = child.getRowType().getFieldCount() * DrillCostBase.AVG_FIELD_WIDTH;
-    final double cpuCost = broadcastFactor * DrillCostBase.SVR_CPU_COST * inputRows ;
+    final double cpuCost = broadcastFactor * DrillCostBase.SVR_CPU_COST * inputRows;
     final double networkCost = broadcastFactor * DrillCostBase.BYTE_NETWORK_COST * inputRows * rowWidth * numEndPoints;
 
     return new DrillCostBase(inputRows, cpuCost, 0, networkCost);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/DrillDistributionTrait.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/DrillDistributionTrait.java
index d10021cc7c4..b250d5a94bf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/DrillDistributionTrait.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/DrillDistributionTrait.java
@@ -96,7 +96,7 @@ public DistributionType getType() {
 
   @Override
   public int hashCode() {
-    return  fields == null ? type.hashCode() : type.hashCode() | fields.hashCode() << 4 ;
+    return  fields == null? type.hashCode(): type.hashCode() | fields.hashCode() << 4;
   }
 
   @Override
@@ -106,7 +106,7 @@ public boolean equals(Object obj) {
     }
     if (obj instanceof DrillDistributionTrait) {
       DrillDistributionTrait that = (DrillDistributionTrait) obj;
-      return this.type == that.type && this.fields.equals(that.fields) ;
+      return this.type == that.type && this.fields.equals(that.fields);
     }
     return false;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java
index 19499d67bbb..ade0fb349f6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashAggPrule.java
@@ -91,7 +91,7 @@ public void onMatch(RelOptRuleCall call) {
         createTransformRequest(call, aggregate, input, traits);
 
         if (create2PhasePlan(call, aggregate)) {
-          traits = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL) ;
+          traits = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL);
 
           RelNode convertedInput = convert(input, traits);
           new TwoPhaseSubset(call, distOnAllKeys).go(aggregate, convertedInput);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java
index 2272a9b97a9..c9be46f9c85 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/HashToMergeExchangePrel.java
@@ -38,7 +38,7 @@
 
   private final List<DistributionField> distFields;
   private int numEndPoints = 0;
-  private final RelCollation collation ;
+  private final RelCollation collation;
 
   public HashToMergeExchangePrel(RelOptCluster cluster, RelTraitSet traitSet, RelNode input,
                                  List<DistributionField> fields,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java
index a03c2f5d429..223882cc129 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SingleMergeExchangePrel.java
@@ -40,7 +40,7 @@
 
 public class SingleMergeExchangePrel extends ExchangePrel {
 
-  private final RelCollation collation ;
+  private final RelCollation collation;
 
   public SingleMergeExchangePrel(RelOptCluster cluster, RelTraitSet traitSet, RelNode input, RelCollation collation) {
     super(cluster, traitSet, input);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/StreamAggPrule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/StreamAggPrule.java
index 85f516a669f..99b6cde457e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/StreamAggPrule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/StreamAggPrule.java
@@ -70,7 +70,7 @@ public void onMatch(RelOptRuleCall call) {
         final RelTraitSet singleDistTrait = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL).plus(singleDist);
 
         if (create2PhasePlan(call, aggregate)) {
-          traits = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL) ;
+          traits = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL);
 
           RelNode convertedInput = convert(input, traits);
           new SubsetTransformer<DrillAggregateRel, InvalidRelException>(call){
@@ -138,7 +138,7 @@ public RelNode convertChild(final DrillAggregateRel join, final RelNode rel) thr
         // createTransformRequest(call, aggregate, input, traits);
 
         if (create2PhasePlan(call, aggregate)) {
-          traits = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL) ;
+          traits = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL);
           RelNode convertedInput = convert(input, traits);
 
           new SubsetTransformer<DrillAggregateRel, InvalidRelException>(call){
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/NumberingRelWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/NumberingRelWriter.java
index 43e03007288..eddbe4f3452 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/NumberingRelWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/explain/NumberingRelWriter.java
@@ -84,7 +84,9 @@ protected void explain_(
     s.append("  ");
 
     if (id != null && id.opId == 0) {
-      for(int i =0; i < spacer.get(); i++){ s.append('-');}
+      for (int i = 0; i < spacer.get(); i++) {
+        s.append('-');
+      }
     }else{
       spacer.spaces(s);
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
index ac491e92056..124857823ce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
@@ -248,7 +248,7 @@ public Prel visitUnnest(UnnestPrel unnestPrel, Void value) throws RuntimeExcepti
 
     for (String s : names) {
       if (uniqueNames.contains(s)) {
-        for (int i = 0; ; i++ ) {
+        for (int i = 0;; i++) {
           s = s + i;
           if (! origNames.contains(s) && ! uniqueNames.contains(s)) {
             break;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java
index 9bb574ce251..90de9af0e7d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java
@@ -42,7 +42,7 @@
    * @param node
    * @return
    */
-  public ControlTunnel getTunnel(DrillbitEndpoint node) ;
+  public ControlTunnel getTunnel(DrillbitEndpoint node);
 
   public DrillbitEndpoint start(DrillbitEndpoint partialEndpoint, boolean allowPortHunting)
       throws DrillbitStartupException;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java
index bf005b12ed8..22d0c7b5016 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java
@@ -65,7 +65,7 @@ public ProfileWrapper(final QueryProfile profile, DrillConfig drillConfig) {
     this.profile = profile;
     this.id = profile.hasQueryId() ? profile.getQueryId() : QueryIdHelper.getQueryId(profile.getId());
     //Generating Operator Name map (DRILL-6140)
-    String profileTextPlan = profile.hasPlan() ? profile.getPlan() : "" ;
+    String profileTextPlan = profile.hasPlan()? profile.getPlan(): "";
     generateOpMap(profileTextPlan);
 
     final List<FragmentWrapper> fragmentProfiles = new ArrayList<>();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/SimpleDurationFormat.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/SimpleDurationFormat.java
index 5a2a37b6c63..cddb75ac078 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/SimpleDurationFormat.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/SimpleDurationFormat.java
@@ -72,6 +72,6 @@ public String verbose() {
     return (days > 0 ? days + " day " : "") +
         ((hours + days) > 0 ? hours + " hr " : "") +
         ((minutes + hours + days) > 0 ? String.format("%02d min ", minutes) : "") +
-        seconds + "." + String.format("%03d sec", milliSeconds) ;
+        seconds + "." + String.format("%03d sec", milliSeconds);
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfig.java
index 28cbe075b88..c3ff0c5dba8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfig.java
@@ -253,8 +253,7 @@ public String toString() {
           .append("\n\ttrustStorePassword: ").append(getPrintablePassword(getTrustStorePassword()))
           .append("\n\thandshakeTimeout: ").append(getHandshakeTimeout())
           .append("\n\tdisableHostVerification: ").append(disableHostVerification())
-          .append("\n\tdisableCertificateVerification: ").append(disableCertificateVerification())
-      ;
+          .append("\n\tdisableCertificateVerification: ").append(disableCertificateVerification());
     }
     return sb.toString();
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonProcessor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonProcessor.java
index d35743fceda..fba80e50782 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonProcessor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonProcessor.java
@@ -51,7 +51,7 @@
                                                        String msg,
                                                        Object... args);
 
-  public boolean ignoreJSONParseError() ;
+  public boolean ignoreJSONParseError();
 
   public void setIgnoreJSONParseErrors(boolean ignoreJSONParseErrors);
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java
index ef8f861e5ba..6d52b6d0215 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java
@@ -224,7 +224,7 @@ public void finishRecord() {
         // "col", "col_2", "col_2_2", "col_2_2_2".
         // No mapping scheme is perfect...
 
-        for (int l = 2;  ; l++) {
+        for (int l = 2;; l++) {
           final String rewritten = header + "_" + l;
           key = rewritten.toLowerCase();
           if (! idents.contains(key)) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextInput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextInput.java
index a0043efc3f4..9ee86ca0359 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextInput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextInput.java
@@ -96,7 +96,7 @@ public TextInput(TextParsingSettings settings, InputStream input, DrillBuf readB
     this.lineSeparator = settings.getNewLineDelimiter();
     byte normalizedLineSeparator = settings.getNormalizedNewLine();
     Preconditions.checkArgument(input instanceof Seekable, "Text input only supports an InputStream that supports Seekable.");
-    boolean isCompressed = input instanceof CompressionInputStream ;
+    boolean isCompressed = input instanceof CompressionInputStream;
     Preconditions.checkArgument(!isCompressed || startPos == 0, "Cannot use split on compressed stream.");
 
     // splits aren't allowed with compressed data.  The split length will be the compressed size which means we'll normally end prematurely.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java
index c4f89699afa..c684e7a52fe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/Records.java
@@ -130,9 +130,15 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
           break;
         // 2.  SqlTypeName enumerators whose names (currently) do not match SQL's
         //     values for DATA_TYPE:
-        case CHAR:                this.DATA_TYPE = "CHARACTER";         break;
-        case VARCHAR:             this.DATA_TYPE = "CHARACTER VARYING"; break;
-        case VARBINARY:           this.DATA_TYPE = "BINARY VARYING";    break;
+        case CHAR:
+          this.DATA_TYPE = "CHARACTER";
+          break;
+        case VARCHAR:
+          this.DATA_TYPE = "CHARACTER VARYING";
+          break;
+        case VARBINARY:
+          this.DATA_TYPE = "BINARY VARYING";
+          break;
         case INTERVAL_YEAR:
         case INTERVAL_YEAR_MONTH:
         case INTERVAL_MONTH:
@@ -145,7 +151,9 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
         case INTERVAL_HOUR_SECOND:
         case INTERVAL_MINUTE:
         case INTERVAL_MINUTE_SECOND:
-        case INTERVAL_SECOND:     this.DATA_TYPE = "INTERVAL";          break;
+        case INTERVAL_SECOND:
+          this.DATA_TYPE = "INTERVAL";
+          break;
         // 3:  SqlTypeName enumerators not yet seen and confirmed or handled.
         default:
           logger.warn( "Type not handled explicitly (code needs review): "
@@ -212,10 +220,18 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
           this.CHARACTER_OCTET_LENGTH = null;
           // This NUMERIC_PRECISION is in bits since NUMERIC_PRECISION_RADIX is 2.
           switch ( sqlTypeName ) {
-            case TINYINT:  NUMERIC_PRECISION =  8; break;
-            case SMALLINT: NUMERIC_PRECISION = 16; break;
-            case INTEGER:  NUMERIC_PRECISION = 32; break;
-            case BIGINT:   NUMERIC_PRECISION = 64; break;
+            case TINYINT:
+              NUMERIC_PRECISION = 8;
+              break;
+            case SMALLINT:
+              NUMERIC_PRECISION = 16;
+              break;
+            case INTEGER:
+              NUMERIC_PRECISION = 32;
+              break;
+            case BIGINT:
+              NUMERIC_PRECISION = 64;
+              break;
             default:
               throw new AssertionError(
                   "Unexpected " + sqlTypeName.getClass().getName() + " value "
@@ -253,9 +269,15 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
           this.CHARACTER_OCTET_LENGTH = null;
           // This NUMERIC_PRECISION is in bits since NUMERIC_PRECISION_RADIX is 2.
           switch ( sqlTypeName ) {
-            case REAL:   NUMERIC_PRECISION = 24; break;
-            case FLOAT:  NUMERIC_PRECISION = 24; break;
-            case DOUBLE: NUMERIC_PRECISION = 53; break;
+            case REAL:
+              NUMERIC_PRECISION = 24;
+              break;
+            case FLOAT:
+              NUMERIC_PRECISION = 24;
+              break;
+            case DOUBLE:
+              NUMERIC_PRECISION = 53;
+              break;
             default:
               throw new AssertionError(
                   "Unexpected type " + sqlTypeName + " in approximate-types branch" );
@@ -285,7 +307,9 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
           this.INTERVAL_TYPE = null;
           this.INTERVAL_PRECISION = null;
           switch(sqlTypeName) {
-          case DATE: this.COLUMN_SIZE = 10; break;// yyyy-MM-dd
+          case DATE:
+            this.COLUMN_SIZE = 10;
+            break;// yyyy-MM-dd
           case TIME: this.COLUMN_SIZE = this.DATETIME_PRECISION == 0
               ? 8 // HH::mm::ss
               : 8 + 1 + this.DATETIME_PRECISION;
@@ -373,8 +397,12 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
             switch(start) {
             case YEAR:
               switch(end) {
-              case YEAR: this.COLUMN_SIZE = INTERVAL_PRECISION + 2; break;// P..Y
-              case MONTH: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 5; break; // P..Y12M
+              case YEAR:
+                this.COLUMN_SIZE = INTERVAL_PRECISION + 2;
+                break;// P..Y
+              case MONTH:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 5;
+                break; // P..Y12M
               default:
                 throw new AssertionError("Unexpected interval type " + this.INTERVAL_TYPE + " in interval-types branch" );
               }
@@ -382,7 +410,9 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
 
             case MONTH:
               switch(end) {
-              case MONTH: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 2; break; // P..M
+              case MONTH:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 2;
+                break; // P..M
               default:
                 throw new AssertionError("Unexpected interval type " + this.INTERVAL_TYPE + " in interval-types branch" );
               }
@@ -390,10 +420,18 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
 
             case DAY:
               switch(end) {
-              case DAY: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 2; break; // P..D
-              case HOUR: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 6; break; // P..DT12H
-              case MINUTE: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 9; break; // P..DT12H60M
-              case SECOND: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 12 + extraSecondIntervalSize; break; // P..DT12H60M60....S
+              case DAY:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 2;
+                break; // P..D
+              case HOUR:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 6;
+                break; // P..DT12H
+              case MINUTE:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 9;
+                break; // P..DT12H60M
+              case SECOND:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 12 + extraSecondIntervalSize;
+                break; // P..DT12H60M60....S
               default:
                 throw new AssertionError("Unexpected interval type " + this.INTERVAL_TYPE + " in interval-types branch" );
               }
@@ -401,9 +439,15 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
 
             case HOUR:
               switch(end) {
-              case HOUR: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 3; break; // PT..H
-              case MINUTE: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 6; break; // PT..H60M
-              case SECOND: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 9 + extraSecondIntervalSize; break; // PT..H12M60....S
+              case HOUR:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 3;
+                break; // PT..H
+              case MINUTE:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 6;
+                break; // PT..H60M
+              case SECOND:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 9 + extraSecondIntervalSize;
+                break; // PT..H12M60....S
               default:
                 throw new AssertionError("Unexpected interval type " + this.INTERVAL_TYPE + " in interval-types branch" );
               }
@@ -411,8 +455,12 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
 
             case MINUTE:
               switch(end) {
-              case MINUTE: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 3; break; // PT...M
-              case SECOND: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 6 + extraSecondIntervalSize; break; // PT..M60....S
+              case MINUTE:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 3;
+                break; // PT...M
+              case SECOND:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 6 + extraSecondIntervalSize;
+                break; // PT..M60....S
               default:
                 throw new AssertionError("Unexpected interval type " + this.INTERVAL_TYPE + " in interval-types branch" );
               }
@@ -421,7 +469,9 @@ public Column(String catalog, String schemaName, String tableName, RelDataTypeFi
 
             case SECOND:
               switch(end) {
-              case SECOND: this.COLUMN_SIZE = this.INTERVAL_PRECISION + 3 + extraSecondIntervalSize; break; // PT....S
+              case SECOND:
+                this.COLUMN_SIZE = this.INTERVAL_PRECISION + 3 + extraSecondIntervalSize;
+                break; // PT....S
               default:
                 throw new AssertionError("Unexpected interval type " + this.INTERVAL_TYPE + " in interval-types branch" );
               }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFilterBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFilterBuilder.java
index f45edbb8cb0..34c2fe70492 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFilterBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFilterBuilder.java
@@ -214,7 +214,7 @@ public LogicalExpression visitFunctionHolderExpression(FunctionHolderExpression
     }
 
     if (value.contains(funcHolderExpr)) {
-      ValueHolder result ;
+      ValueHolder result;
       try {
         result = InterpreterEvaluator.evaluateConstantExpr(udfUtilities, funcHolderExpr);
       } catch (Exception e) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
index b5f0ca48a86..7a0dbbe45c4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
@@ -167,7 +167,7 @@ protected void doOnMatch(RelOptRuleCall call, FilterPrel filter, ProjectPrel pro
       return;
     }
 
-    RelNode newScan = ScanPrel.create(scan, scan.getTraitSet(), newGroupScan, scan.getRowType());;
+    RelNode newScan = ScanPrel.create(scan, scan.getTraitSet(), newGroupScan, scan.getRowType());
 
     if (project != null) {
       newScan = project.copy(project.getTraitSet(), ImmutableList.of(newScan));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
index c343d317c7b..04740401784 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
@@ -78,7 +78,10 @@ ColumnDescriptor getColumnDescriptor() {
   int currDefLevel;
 
   // variables for a single read pass
-  long readStartInBytes = 0, readLength = 0, readLengthInBits = 0, recordsReadInThisIteration = 0;
+  long readStartInBytes = 0;
+  long readLength = 0;
+  long readLengthInBits = 0;
+  long recordsReadInThisIteration = 0;
   private ExecutorService threadPool;
 
   volatile boolean isShuttingDown; //Indicate to not submit any new AsyncPageReader Tasks during clear()
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataPathUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataPathUtils.java
index b9480e836cc..f7239d660c7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataPathUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataPathUtils.java
@@ -87,7 +87,7 @@ public static ParquetTableMetadata_v3 createMetadataWithRelativePaths(
       ParquetTableMetadata_v3 tableMetadataWithAbsolutePaths, String baseDir) {
     List<String> directoriesWithRelativePaths = Lists.newArrayList();
     for (String directory : tableMetadataWithAbsolutePaths.getDirectories()) {
-      directoriesWithRelativePaths.add(relativize(baseDir, directory)) ;
+      directoriesWithRelativePaths.add(relativize(baseDir, directory));
     }
     List<ParquetFileMetadata_v3> filesWithRelativePaths = Lists.newArrayList();
     for (ParquetFileMetadata_v3 file : tableMetadataWithAbsolutePaths.files) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
index b2961a5806c..54992076efc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/BlockMapBuilder.java
@@ -192,7 +192,7 @@ public long getLength() {
     final Timer.Context context = metrics.timer(BLOCK_MAP_BUILDER_TIMER).time();
     BlockLocation[] blocks;
     ImmutableRangeMap<Long,BlockLocation> blockMap;
-    blocks = fs.getFileBlockLocations(status, 0 , status.getLen());
+    blocks = fs.getFileBlockLocations(status, 0, status.getLen());
     ImmutableRangeMap.Builder<Long, BlockLocation> blockMapBuilder = new ImmutableRangeMap.Builder<Long,BlockLocation>();
     for (BlockLocation block : blocks) {
       long start = block.getOffset();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java
index 8729a391bf1..270841f5bc1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/VectorUtil.java
@@ -56,7 +56,7 @@ public static void logVectorAccessibleContent(VectorAccessible va, final String
       int columnCounter = 0;
       for (VectorWrapper<?> vw : va) {
         boolean lastColumn = columnCounter == width - 1;
-        Object o ;
+        Object o;
         try{
           o = vw.getValueVector().getAccessor().getObject(row);
         } catch (Exception e) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/DataCollector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/DataCollector.java
index fa746770b18..30862efcc58 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/DataCollector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/DataCollector.java
@@ -24,7 +24,7 @@
 
 public interface DataCollector extends AutoCloseable {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DataCollector.class);
-  public boolean batchArrived(int minorFragmentId, RawFragmentBatch batch) throws IOException ;
+  public boolean batchArrived(int minorFragmentId, RawFragmentBatch batch) throws IOException;
   public int getOppositeMajorFragmentId();
   public RawBatchBuffer[] getBuffers();
   public int getTotalIncomingFragments();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java
index 719f3679dfc..44b7f53fb9c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/UnlimitedRawBatchBuffer.java
@@ -40,7 +40,7 @@ public UnlimitedRawBatchBuffer(FragmentContext context, int fragmentCount) {
   }
 
   private class UnlimitedBufferQueue implements BufferQueue<RawFragmentBatch> {
-    private final LinkedBlockingDeque<RawFragmentBatch> buffer = Queues.newLinkedBlockingDeque();;
+    private final LinkedBlockingDeque<RawFragmentBatch> buffer = Queues.newLinkedBlockingDeque();
 
     @Override
     public void addOomBatch(RawFragmentBatch batch) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/rm/ResourceManagerBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/rm/ResourceManagerBuilder.java
index 4305891350e..f4e460d0ea5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/rm/ResourceManagerBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/rm/ResourceManagerBuilder.java
@@ -53,7 +53,7 @@
 
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ResourceManagerBuilder.class);
 
-  private DrillbitContext context ;
+  private DrillbitContext context;
 
   public ResourceManagerBuilder(final DrillbitContext context) {
     this.context = context;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
index 18fc4e13751..802ecced3ba 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
@@ -85,7 +85,7 @@ protected void testSqlPlan(String sqlCommands) throws Exception {
     provider.start();
     final ScanResult scanResult = ClassPathScanner.fromPrescan(config);
     final LogicalPlanPersistence logicalPlanPersistence = new LogicalPlanPersistence(config, scanResult);
-    final SystemOptionManager systemOptions = new SystemOptionManager(logicalPlanPersistence , provider, config);
+    final SystemOptionManager systemOptions = new SystemOptionManager(logicalPlanPersistence, provider, config);
     systemOptions.init();
     @SuppressWarnings("resource")
     final UserSession userSession = UserSession.Builder.newBuilder().withOptionManager(systemOptions).build();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index d33fbeeecae..683248e2aff 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -293,7 +293,7 @@ public void testSelStarDifferentColumnOrder() throws Exception {
   @Test(expected = UserException.class)  // Should get "At line 1, column 8: Column 'n_nationkey' is ambiguous"
   public void testSelStarAmbiguousJoin() throws Exception {
     try {
-      test("select x.n_nationkey, x.n_name, x.n_regionkey, x.r_name from (select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey) x " ) ;
+      test("select x.n_nationkey, x.n_name, x.n_regionkey, x.r_name from (select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey) x " );
     } catch (UserException e) {
       logger.info("***** Test resulted in expected failure: " + e.getMessage());
       throw e;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
index 38f1d3a326a..d9d647e2ada 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
@@ -475,8 +475,8 @@ public void testUnionInputsGroupByOnCSV() throws Exception {
             "(select columns[0] c2 from cp.`%s` t2 \n" +
             "where t2.columns[0] is not null \n" +
             "group by columns[0])) \n" +
-            "group by col0"
-          , root, root)
+            "group by col0",
+            root, root)
         .unOrdered()
         .baselineColumns("col0")
         .baselineValues("290")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java b/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java
index 0588a596505..5065a576ceb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/RunRootExec.java
@@ -55,7 +55,7 @@ public static void main(String args[]) throws Exception {
     FunctionImplementationRegistry registry = bitContext.getFunctionImplementationRegistry();
     FragmentContextImpl context = new FragmentContextImpl(bitContext, PlanFragment.getDefaultInstance(), null, registry);
     SimpleRootExec exec;
-    for (int i = 0; i < iterations; i ++) {
+    for (int i = 0; i < iterations; i++) {
       Stopwatch w = Stopwatch.createStarted();
       logger.info("STARTITER: {}", i);
       exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolderTest.java
index 883b94a97c2..bc6b7427cbe 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolderTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolderTest.java
@@ -204,7 +204,7 @@ public void testGetHoldersByFunctionNameWithVersion() {
     for (List<FunctionHolder> functionHolders : newJars.values()) {
       for (FunctionHolder functionHolder : functionHolders) {
         if ("lower".equals(functionHolder.getName())) {
-          expectedResult.add(functionHolder.getHolder()) ;
+          expectedResult.add(functionHolder.getHolder());
         }
       }
     }
@@ -220,7 +220,7 @@ public void testGetHoldersByFunctionName() {
     for (List<FunctionHolder> functionHolders : newJars.values()) {
       for (FunctionHolder functionHolder : functionHolders) {
         if ("lower".equals(functionHolder.getName())) {
-          expectedResult.add(functionHolder.getHolder()) ;
+          expectedResult.add(functionHolder.getHolder());
         }
       }
     }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java
index 045c24d8f8b..975675aac8f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunction.java
@@ -85,7 +85,7 @@ public void testSortDate() throws Throwable {
   public void testCovarianceCorrelation() throws Throwable {
     String planPath = "/functions/test_covariance.json";
     String dataPath = "/covariance_input.json";
-    Double expectedValues[] = {4.571428571428571d, 4.857142857142857d, -6.000000000000002d, 4.0d , 4.25d, -5.250000000000002d, 1.0d, 0.9274260335029677d, -1.0000000000000004d};
+    Double expectedValues[] = {4.571428571428571d, 4.857142857142857d, -6.000000000000002d, 4.0d, 4.25d, -5.250000000000002d, 1.0d, 0.9274260335029677d, -1.0000000000000004d};
 
     runTest(expectedValues, planPath, dataPath);
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
index ff327330ef9..a9da761116d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
@@ -87,7 +87,7 @@ public void dateTruncOnDateSimpleUnits() throws Exception {
     testBuilder()
         .sqlQuery(query)
         .unOrdered()
-        .baselineColumns("second", "minute", "hour", "day", "month", "week" , "year", "q1", "q2", "q3", "decade1", "decade2", "decade3")
+        .baselineColumns("second", "minute", "hour", "day", "month", "week", "year", "q1", "q2", "q3", "decade1", "decade2", "decade3")
         .baselineValues(
             DateUtility.parseLocalDate("2011-02-03"), // seconds
             DateUtility.parseLocalDate("2011-02-03"), // minute
@@ -183,7 +183,7 @@ public void dateTruncOnTimeStampSimpleUnits() throws Exception {
     testBuilder()
         .sqlQuery(query)
         .unOrdered()
-        .baselineColumns("second", "minute", "hour", "day", "month", "week" , "year", "q1", "q2", "q3", "decade1", "decade2", "decade3")
+        .baselineColumns("second", "minute", "hour", "day", "month", "week", "year", "q1", "q2", "q3", "decade1", "decade2", "decade3")
         .baselineValues(
             DateUtility.parseLocalDateTime("2011-02-03 10:11:12.0"), // seconds
             DateUtility.parseLocalDateTime("2011-02-03 10:11:00.0"), // minute
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
index 3b81fa458d5..249ad5c6d9d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
@@ -227,7 +227,7 @@ private ValueVector evalExprWithInterpreter(String expression, RecordBatch batch
   }
 
   private void showValueVectorContent(ValueVector vw) {
-    for (int row = 0; row < vw.getAccessor().getValueCount(); row ++ ) {
+    for (int row = 0; row < vw.getAccessor().getValueCount(); row++) {
       final Object o = vw.getAccessor().getObject(row);
       final String cellString;
       if (o instanceof byte[]) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
index 1ef9c7b75e8..2eb55dbaaac 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
@@ -65,7 +65,7 @@ public static void setup() throws Exception {
     addMiniDfsBasedStorage(createTestWorkspaces());
   }
 
-  private static Map<String , WorkspaceConfig> createTestWorkspaces() throws Exception {
+  private static Map<String, WorkspaceConfig> createTestWorkspaces() throws Exception {
     // Create "/tmp" folder and set permissions to "777"
     final Path tmpPath = new Path("/tmp");
     fs.delete(tmpPath, true);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java b/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
index 3501bfc51e6..315bc299a11 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/memory/TestAllocators.java
@@ -181,8 +181,7 @@ public void testAllocators() throws Exception {
     final DrillConfig config = DrillConfig.create(TEST_CONFIGURATIONS);
 
     try (final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
-        final Drillbit bit = new Drillbit(config, serviceSet)) {
-      ;
+         final Drillbit bit = new Drillbit(config, serviceSet)) {
       bit.run();
       final DrillbitContext bitContext = bit.getContext();
       FunctionImplementationRegistry functionRegistry = bitContext.getFunctionImplementationRegistry();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/PartitionLimit/TestPartitionLimitBatch.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/PartitionLimit/TestPartitionLimitBatch.java
index 574ff768dad..926b7d9a68a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/PartitionLimit/TestPartitionLimitBatch.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/PartitionLimit/TestPartitionLimitBatch.java
@@ -344,7 +344,7 @@ public void testPartitionLimit_PartitionIdSpanningAcrossBatches() {
     expectedRowSets.add(expectedRowSet1);
     expectedRowSets.add(expectedRowSet2);
 
-    testPartitionLimitCommon(0 ,1);
+    testPartitionLimitCommon(0, 1);
   }
 
   @Test
@@ -385,7 +385,7 @@ public void testPartitionLimit_PartitionIdSpanningAcrossBatches_WithOffset() {
 
     expectedRowSets.add(expectedRowSet1);
 
-    testPartitionLimitCommon(2 ,3);
+    testPartitionLimitCommon(2, 3);
   }
 
   /**
@@ -440,7 +440,7 @@ public void testPartitionLimit_PartitionIdSelectedAcrossBatches() {
     expectedRowSets.add(expectedRowSet1);
     expectedRowSets.add(expectedRowSet2);
 
-    testPartitionLimitCommon(0 ,5);
+    testPartitionLimitCommon(0, 5);
   }
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
index ff1edb932e8..0f47b802c1d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
@@ -159,7 +159,7 @@ public void testSimpleDecimalArithmetic() throws Exception {
 
             String addOutput[] = {"123456888.0", "22.2", "0.2", "-0.2", "-987654444.2","-3.0"};
             String subtractOutput[] = {"123456690.0", "0.0", "0.0", "0.0", "-987654198.0", "-1.0"};
-            String multiplyOutput[] = {"12222222111.00" , "123.21" , "0.01", "0.01",  "121580246927.41", "2.00"};
+            String multiplyOutput[] = {"12222222111.00", "123.21", "0.01", "0.01",  "121580246927.41", "2.00"};
 
             Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
 
@@ -208,7 +208,7 @@ public void testComplexDecimal() throws Exception {
             QueryDataBatch batch = results.get(0);
             assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
 
-            String addOutput[] = {"-99999998877.700000000", "11.423456789", "123456789.100000000", "-0.119998000", "100000000112.423456789" , "-99999999879.907000000", "123456789123456801.300000000"};
+            String addOutput[] = {"-99999998877.700000000", "11.423456789", "123456789.100000000", "-0.119998000", "100000000112.423456789", "-99999999879.907000000", "123456789123456801.300000000"};
             String subtractOutput[] = {"-100000001124.300000000", "10.823456789", "-123456788.900000000", "-0.120002000", "99999999889.823456789", "-100000000122.093000000", "123456789123456776.700000000"};
 
             Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDistributedFragmentRun.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDistributedFragmentRun.java
index 6fc3dbe5af8..8030573a2d0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDistributedFragmentRun.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDistributedFragmentRun.java
@@ -43,7 +43,8 @@
   public void oneBitOneExchangeOneEntryRun() throws Exception{
     RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
 
-    try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet); DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());){
+    try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+         DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
       bit1.run();
       client.connect();
       List<QueryDataBatch> results = client.runQuery(QueryType.PHYSICAL, Files.toString(DrillFileUtils.getResourceAsFile("/physical_single_exchange.json"), Charsets.UTF_8));
@@ -63,7 +64,8 @@ public void oneBitOneExchangeOneEntryRun() throws Exception{
   public void oneBitOneExchangeTwoEntryRun() throws Exception{
     RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
 
-    try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet); DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());){
+    try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+         DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
       bit1.run();
       client.connect();
       List<QueryDataBatch> results = client.runQuery(QueryType.PHYSICAL, Files.toString(DrillFileUtils.getResourceAsFile("/physical_single_exchange_double_entry.json"), Charsets.UTF_8));
@@ -82,16 +84,17 @@ public void oneBitOneExchangeTwoEntryRun() throws Exception{
     public void oneBitOneExchangeTwoEntryRunLogical() throws Exception{
         RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
 
-        try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet); DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());){
-            bit1.run();
-            client.connect();
-            List<QueryDataBatch> results = client.runQuery(QueryType.LOGICAL, Files.toString(DrillFileUtils.getResourceAsFile("/scan_screen_logical.json"), Charsets.UTF_8));
-            int count = 0;
-            for(QueryDataBatch b : results){
-                count += b.getHeader().getRowCount();
-                b.release();
-            }
-            assertEquals(100, count);
+        try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+             DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
+          bit1.run();
+          client.connect();
+          List<QueryDataBatch> results = client.runQuery(QueryType.LOGICAL, Files.toString(DrillFileUtils.getResourceAsFile("/scan_screen_logical.json"), Charsets.UTF_8));
+          int count = 0;
+          for (QueryDataBatch b : results) {
+            count += b.getHeader().getRowCount();
+            b.release();
+          }
+          assertEquals(100, count);
         }
 
 
@@ -101,7 +104,9 @@ public void oneBitOneExchangeTwoEntryRunLogical() throws Exception{
     public void twoBitOneExchangeTwoEntryRun() throws Exception{
       RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
 
-      try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet); Drillbit bit2 = new Drillbit(CONFIG, serviceSet); DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator());){
+      try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
+           Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
+           DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
         bit1.run();
         bit2.run();
         client.connect();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java
index 60f1e677bcd..6b16aab5df7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java
@@ -72,7 +72,7 @@ public void testNumericTypes() throws Exception {
       .ordered()
       .baselineColumns("kl", "vl");
 
-    for (long i = 0; i< 12 ; ++i) {
+    for (long i = 0; i < 12; ++i) {
       if (i %2 == 0) {
         builder.baselineValues(i, i);
       } else {
@@ -102,7 +102,7 @@ public void testNumericAndStringTypes() throws Exception {
       .ordered()
       .baselineColumns("kl", "vl");
 
-    for (long i = 0; i< 24 ; i+=2) {
+    for (long i = 0; i < 24; i+=2) {
         builder.baselineValues(i, i);
     }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggEmitOutcome.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggEmitOutcome.java
index b03a989d746..ebb1cad63fd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggEmitOutcome.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggEmitOutcome.java
@@ -84,7 +84,7 @@ private void testHashAggrEmit(int inp2_1[], int inp2_2[], String inp2_3[],  // f
     // First input batch
     RowSetBuilder builder2 = operatorFixture.rowSetBuilder(inputSchema);
     if ( inp2_1 != null ) {
-      for ( int i = 0; i < inp2_1.length ; i++) {
+      for (int i = 0; i < inp2_1.length; i++) {
         builder2 = builder2.addRow(inp2_1[i], inp2_2[i], inp2_3[i]);
       }
     }
@@ -93,7 +93,7 @@ private void testHashAggrEmit(int inp2_1[], int inp2_2[], String inp2_3[],  // f
     // Second input batch
     RowSetBuilder builder3 = operatorFixture.rowSetBuilder(inputSchema);
     if ( inp3_1 != null ) {
-      for ( int i = 0; i < inp3_1.length ; i++) {
+      for (int i = 0; i < inp3_1.length; i++) {
         builder3 = builder3.addRow(inp3_1[i], inp3_2[i], inp3_3[i]);
       }
     }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java
index 295010a026f..aac93ead20f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java
@@ -55,7 +55,7 @@
      *
      * @throws Exception
      */
-    private void testSpill(long maxMem, long numPartitions, long minBatches, int maxParallel, boolean fallback ,boolean predict,
+    private void testSpill(long maxMem, long numPartitions, long minBatches, int maxParallel, boolean fallback, boolean predict,
                            String sql, long expectedRows, int cycle, int fromPart, int toPart) throws Exception {
         ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
           .sessionOption(ExecConstants.HASHAGG_MAX_MEMORY_KEY,maxMem)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/common/HashPartitionTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/common/HashPartitionTest.java
index 6d064347ffa..70acf74ea7c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/common/HashPartitionTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/common/HashPartitionTest.java
@@ -216,7 +216,7 @@ public void run(SpillSet spillSet,
         hashPartition.completeAnInnerBatch(false, false);
         hashPartition.spillThisPartition();
         final String spillFile = hashPartition.getSpillFile();
-        final int batchesCount = hashPartition.getPartitionBatchesCount();;
+        final int batchesCount = hashPartition.getPartitionBatchesCount();
         hashPartition.closeWriter();
 
         SpilledRecordbatch spilledBuildBatch = new SpilledRecordbatch(spillFile, batchesCount, context, buildSchema, operatorContext, spillSet);
@@ -270,8 +270,8 @@ public void run(HashPartitionTestCase testCase) throws Exception {
         final BatchSchema probeSchema = new BatchSchema(BatchSchema.SelectionVectorMode.NONE, probeCols);
         final RecordBatch probeBatch = testCase.createProbeBatch(probeSchema, allocator);
 
-        final LogicalExpression buildColExpression = SchemaPath.getSimplePath(buildColB.getName());;
-        final LogicalExpression probeColExpression = SchemaPath.getSimplePath(probeColB.getName());;
+        final LogicalExpression buildColExpression = SchemaPath.getSimplePath(buildColB.getName());
+        final LogicalExpression probeColExpression = SchemaPath.getSimplePath(probeColB.getName());
 
         final JoinCondition condition = new JoinCondition(DrillJoinRel.EQUALITY_CONDITION, probeColExpression, buildColExpression);
         final List<Comparator> comparators = Lists.newArrayList(JoinUtils.checkAndReturnSupportedJoinComparator(condition));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java
index 4ad29870580..35d1010cacc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java
@@ -336,7 +336,7 @@ public void testKVGenFlatten2() throws Exception {
     // currently runs
     // TODO - re-verify results by hand
     if(RUN_ADVANCED_TESTS){
-      test("select flatten(kvgen(visited_cellid_counts)) as mytb from dfs.`tmp/mapkv.json`") ;
+      test("select flatten(kvgen(visited_cellid_counts)) as mytb from dfs.`tmp/mapkv.json`");
     }
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinSpill.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinSpill.java
index 5935d1ba192..1483b6fc523 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinSpill.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinSpill.java
@@ -101,7 +101,7 @@ public void testLeftOuterHashJoinSpill() {
     List<String> rightTable = Lists.newArrayList("[{\"rgt\": 0, \"b\" : \"a string\"}]",
       "[{\"rgt\": 0, \"b\" : \"a different string\"},{\"rgt\": 0, \"b\" : \"yet another\"}]");
     int numRows = 4_000; // 100_000
-    for ( int cnt = 1; cnt <= numRows / 2 ; cnt++ ) { // inner use only half, to check the left-outer join
+    for (int cnt = 1; cnt <= numRows / 2; cnt++) { // inner use only half, to check the left-outer join
       // leftTable.add("[{\"lft\": " + cnt + ", \"a\" : \"a string\"}]");
       rightTable.add("[{\"rgt\": " + cnt + ", \"b\" : \"a string\"}]");
     }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
index 1203f4892d7..108e9a2a9fe 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
@@ -99,7 +99,10 @@ public BatchAccessor batchAccessor() {
     }
 
     @Override
-    public boolean buildSchema() { buildSchemaCalled = true; return ! schemaEOF; }
+    public boolean buildSchema() {
+      buildSchemaCalled = true;
+      return !schemaEOF;
+    }
 
     @Override
     public boolean next() {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
index 66ee3e4d410..807f7fdf293 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
@@ -164,7 +164,7 @@ public void testLargeFooter() throws Exception {
     final int numCols = 1000;
     String[] colNames = new String[numCols];
     Object[] values = new Object[numCols];
-    for (int i = 0 ; i < numCols - 1; i++) {
+    for (int i = 0; i < numCols - 1; i++) {
       sb.append(String.format("\"col_%d\" : 100,", i));
       colNames[i] = "col_" + i;
       values[i] = 100L;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMapArray.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMapArray.java
index 653137eef47..3d816c32900 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMapArray.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMapArray.java
@@ -101,8 +101,7 @@ public void testBasics() {
       .addRow(30, mapArray(
           mapValue(310, "d3.1"),
           mapValue(320, "d3.2"),
-          mapValue(330, "d3.3")))
-      ;
+          mapValue(330, "d3.3")));
 
     // Verify the first batch
 
@@ -144,8 +143,7 @@ public void testBasics() {
       .addRow(60, mapArray(
           mapValue(610, "d6.1", "e6.1"),
           mapValue(620, "d6.2", null),
-          mapValue(630, "d6.3", "e6.3")))
-      ;
+          mapValue(630, "d6.3", "e6.3")));
 
     // Verify the second batch
 
@@ -207,8 +205,7 @@ public void testNestedArray() {
       .addRow(30, mapArray(
           mapValue(310, strArray("d3.1.1", "d3.2.2")),
           mapValue(320, strArray()),
-          mapValue(330, strArray("d3.3.1", "d1.2.2"))))
-      ;
+          mapValue(330, strArray("d3.3.1", "d1.2.2"))));
 
     // Verify the batch
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMaps.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMaps.java
index 98ba3ed6b85..f876fb83da2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMaps.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderMaps.java
@@ -553,8 +553,7 @@ public void testMapWithArray() {
       .addRow(10, mapValue(intArray(110, 120, 130),
                            strArray("d1.1", "d1.2", "d1.3", "d1.4")))
       .addRow(20, mapValue(intArray(210), strArray()))
-      .addRow(30, mapValue(intArray(), strArray("d3.1")))
-      ;
+      .addRow(30, mapValue(intArray(), strArray("d3.1")));
 
     // Validate first batch
 
@@ -573,15 +572,13 @@ public void testMapWithArray() {
     rsLoader.startBatch();
     rootWriter
       .addRow(40, mapValue(intArray(410, 420), strArray("d4.1", "d4.2")))
-      .addRow(50, mapValue(intArray(510), strArray("d5.1")))
-      ;
+      .addRow(50, mapValue(intArray(510), strArray("d5.1")));
 
     TupleWriter mapWriter = rootWriter.tuple("m");
     mapWriter.addColumn(SchemaBuilder.columnSchema("e", MinorType.VARCHAR, DataMode.REPEATED));
     rootWriter
       .addRow(60, mapValue(intArray(610, 620), strArray("d6.1", "d6.2"), strArray("e6.1", "e6.2")))
-      .addRow(70, mapValue(intArray(710), strArray(), strArray("e7.1", "e7.2")))
-      ;
+      .addRow(70, mapValue(intArray(710), strArray(), strArray("e7.1", "e7.2")));
 
     // Validate first batch. The new array should have been back-filled with
     // empty offsets for the missing rows.
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTorture.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTorture.java
index 826b71eb66a..bcf01de98ce 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTorture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTorture.java
@@ -250,7 +250,7 @@ public int rowCount() {
     public BatchReader(TestSetup setup, RowSetReader reader, ReadState readState) {
       this.setup = setup;
       this.rootReader = reader;
-      this.readState = readState;;
+      this.readState = readState;
 
       TupleReader m1Reader = rootReader.tuple("m1");
       n1Reader = m1Reader.scalar("n1");
@@ -351,12 +351,11 @@ public void checkInt(ScalarReader reader, int id, int cycle) {
 
   @Test
   public void tortureTest() {
-    LogFixtureBuilder logBuilder = new LogFixtureBuilder()
+    LogFixtureBuilder logBuilder = new LogFixtureBuilder();
 
         // Enable to get detailed tracing when things go wrong.
 
 //        .logger("org.apache.drill.exec.physical.rowSet", Level.TRACE)
-        ;
     try (LogFixture logFixture = logBuilder.build()) {
       doTortureTest();
     }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
index 6374f1f2c8c..79f260ff8d7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
@@ -91,14 +91,14 @@ public MiniPlanTestBuilder expectSchema(BatchSchema batchSchema) {
      * @param baselineValues
      * @return
      */
-    public MiniPlanTestBuilder baselineValues(Object ... baselineValues) {
+    public MiniPlanTestBuilder baselineValues(Object... baselineValues) {
       if (baselineRecords == null) {
         baselineRecords = new ArrayList<>();
       }
 
       Map<String, Object> ret = new HashMap<>();
       int i = 0;
-      Preconditions.checkArgument(expectSchema != null , "Expected schema should be set before specify baseline values.");
+      Preconditions.checkArgument(expectSchema != null, "Expected schema should be set before specify baseline values.");
       Preconditions.checkArgument(baselineValues.length == expectSchema.getFieldCount(),
           "Must supply the same number of baseline values as columns in expected schema.");
 
@@ -230,7 +230,7 @@ public void go() throws Exception {
     protected long maxAllocation = MAX_ALLOCATION;
 
     final private List<RecordBatch> inputs = Lists.newArrayList();
-    final PopBuilder parent ;
+    final PopBuilder parent;
 
     public PopBuilder() {
       this.parent = null;
@@ -334,13 +334,13 @@ public T fileSystem(DrillFileSystem fs) {
     }
 
     @SuppressWarnings("unchecked")
-    public T columnsToRead(SchemaPath ... columnsToRead) {
+    public T columnsToRead(SchemaPath... columnsToRead) {
       this.columnsToRead = Lists.newArrayList(columnsToRead);
       return (T) this;
     }
 
     @SuppressWarnings("unchecked")
-    public T columnsToRead(String ... columnsToRead) {
+    public T columnsToRead(String... columnsToRead) {
       this.columnsToRead = Lists.newArrayList();
 
       for (String column : columnsToRead) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
index 3d99adbc19e..4a116571481 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
@@ -525,7 +525,7 @@ private void testSingleInputNullBatchHandling(PhysicalOperator pop) throws Excep
         .expectNullBatch(true)
         .go();
 
-    final RecordBatch input2 = createScanBatchFromJson(SINGLE_EMPTY_JSON, SINGLE_EMPTY_JSON2);;
+    final RecordBatch input2 = createScanBatchFromJson(SINGLE_EMPTY_JSON, SINGLE_EMPTY_JSON2);
     RecordBatch batch2 = new PopBuilder()
         .physicalOperator(pop)
         .addInput(input2)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java
index eb505195ae7..3e47db7c593 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java
@@ -1333,7 +1333,7 @@ public void testEmptyBatchRepeatedMap() {
       ValueVector valueVector1 = mapVector.getChild("value");
       assertEquals(((Integer.highestOneBit(testRowCount * STD_REPETITION_FACTOR) << 1)), keyVector.getValueCapacity());
       offsetVector = ((VariableWidthVector)valueVector1).getOffsetVector();
-      assertEquals((Integer.highestOneBit(testRowCount * STD_REPETITION_FACTOR) << 1) , offsetVector.getValueCapacity());
+      assertEquals((Integer.highestOneBit(testRowCount * STD_REPETITION_FACTOR) << 1), offsetVector.getValueCapacity());
       assertEquals(Integer.highestOneBit(testRowCount * STD_REPETITION_FACTOR << 1)  - 1, valueVector1.getValueCapacity());
 
       // Allocates the same as value passed since it is already power of two.
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
index 1da827a1d7a..4313ad02578 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
@@ -138,8 +138,8 @@ public void testTimeStamp() throws Exception {
 
                 ValueVector.Accessor accessor = v.getValueVector().getAccessor();
 
-                assertEquals(accessor.getObject(0).toString() ,"1970-01-02 10:20:33.000");
-                assertEquals(accessor.getObject(1).toString() ,"2008-12-28 11:34:00.129");
+                assertEquals(accessor.getObject(0).toString(),"1970-01-02 10:20:33.000");
+                assertEquals(accessor.getObject(1).toString(),"2008-12-28 11:34:00.129");
                 assertEquals(accessor.getObject(2).toString(), "2000-02-27 14:24:00.000");
             }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
index 787584de9e4..f2f550acb12 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
@@ -54,7 +54,7 @@ public void testBackPathBad() throws Exception {
             {"/tmp", "../etc/bad"},  //  goes outside parent; resolves to /etc/bad
             {"", "/bad"},            //  empty parent
             {"/", ""},               //  empty path
-        } ;
+        };
 
 
     for (int i = 0; i < badPaths.length; i++) {
@@ -82,7 +82,7 @@ public void testBackPathGood() throws Exception {
             {"/", "etc/tmp/../../good"},   //  no leading slash in path
             {"/", "../good"},              //  resolves to /../good which is OK
             {"/", "/good"}
-        } ;
+        };
 
     for (int i = 0; i < goodPaths.length; i++) {
       try {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetInternalsTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetInternalsTest.java
index 5a5207bfdb3..caa4e737d79 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetInternalsTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetInternalsTest.java
@@ -39,9 +39,9 @@
 
   @BeforeClass
   public static void setup( ) throws Exception {
-    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
       // Set options, etc.
-      ;
+
     startCluster(builder);
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetSimpleTestFileGenerator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetSimpleTestFileGenerator.java
index 15dd633152b..2e6923a622f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetSimpleTestFileGenerator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetSimpleTestFileGenerator.java
@@ -87,7 +87,7 @@
           //  "      required int64 _TIMESTAMP_MICROS_int64  ( TIMESTAMP_MICROS ) ; \n" +
           "  required fixed_len_byte_array(12) _INTERVAL_fixed_len_byte_array_12  ( INTERVAL ) ; \n" +
           "  required int96  _INT96_RAW  ; \n" +
-          "} \n" ;
+          "} \n";
   public static String simpleNullableSchemaMsg =
       "message ParquetLogicalDataTypes { \n" +
           "  required int32 rowKey; \n" +
@@ -115,7 +115,7 @@
           //  "      optional int64 _TIMESTAMP_MICROS_int64  ( TIMESTAMP_MICROS ) ; \n" +
           "  optional fixed_len_byte_array(12) _INTERVAL_fixed_len_byte_array_12  ( INTERVAL ) ; \n" +
           "  optional int96  _INT96_RAW  ; \n" +
-          "} \n" ;
+          "} \n";
 
   public static String complexSchemaMsg =
       "message ParquetLogicalDataTypes { \n" +
@@ -160,7 +160,7 @@
           "      required int96  _INT96_RAW  ; \n" +
           "    } \n" +
           "  } \n" +
-          "} \n" ;
+          "} \n";
   public static String complexNullableSchemaMsg =
       "message ParquetLogicalDataTypes { \n" +
           "  required int32 rowKey; \n" +
@@ -204,7 +204,7 @@
           "      optional int96  _INT96_RAW  ; \n" +
           "    } \n" +
           "  } \n" +
-          "} \n" ;
+          "} \n";
 
   public static MessageType simpleSchema = MessageTypeParser.parseMessageType(simpleSchemaMsg);
   public static MessageType complexSchema = MessageTypeParser.parseMessageType(complexSchemaMsg);
@@ -292,7 +292,8 @@ public static void writeComplexValues(GroupFactory gf, ParquetWriter<Group> comp
           .append("_INT_64", 0x7FFFFFFFFFFFFFFFL)
           .append("_UINT_64", 0xFFFFFFFFFFFFFFFFL)
           .append("_DECIMAL_decimal18", 0xFFFFFFFFFFFFFFFFL);
-      byte[] bytes = new byte[30]; Arrays.fill(bytes, (byte)1);
+      byte[] bytes = new byte[30];
+      Arrays.fill(bytes, (byte) 1);
       numeric.addGroup("FixedLen").append("_DECIMAL_fixed_n", Binary.fromConstantByteArray(bytes, 0, 20));
       numeric.addGroup("Binary").append("_DECIMAL_unlimited", Binary.fromConstantByteArray(bytes, 0, 30));
       numeric.addGroup("DateTimeTypes")
@@ -375,7 +376,8 @@ public static void writeSimpleValues(SimpleGroupFactory sgf, ParquetWriter<Group
     }
     {
       Group simpleGroup = sgf.newGroup();
-      byte[] bytes = new byte[30]; Arrays.fill(bytes, (byte)1);
+      byte[] bytes = new byte[30];
+      Arrays.fill(bytes, (byte) 1);
       simpleGroup.append("rowKey", ++rowKey);
       simpleGroup.append("_UTF8", "UTF8 string" + rowKey)
           .append("_Enum", MAX_VALUE.toString())
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
index c1479809583..c309cf3152f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
@@ -215,10 +215,10 @@ public void testComplexLogicalIntTypes() throws Exception {
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns(columns)
-        .baselineValues(mapOf("a","a","b","b")  , 0L                   , 0           , 0        , 0       , 0L                    , 0            , 0       ,0       )
-        .baselineValues(mapOf("a","a","b","b")  , -1L                  , -1          , -1       , -1      , -1L                   , -1           , -1      , -1     )
-        .baselineValues(mapOf("a","a","b","b")  , 1L                   , 1           , 1        , 1       , -9223372036854775808L , 1            , 1       , 1      )
-        .baselineValues(mapOf("a","a","b","b")  , 9223372036854775807L , 2147483647  , 65535    , 255     , 9223372036854775807L  , -2147483648  , -32768  , -128   )
+        .baselineValues(mapOf("a","a","b","b"), 0L, 0, 0, 0, 0L, 0, 0, 0)
+        .baselineValues(mapOf("a","a","b","b"), -1L, -1, -1, -1, -1L, -1, -1, -1)
+        .baselineValues(mapOf("a","a","b","b"), 1L, 1, 1, 1, -9223372036854775808L, 1, 1, 1)
+        .baselineValues(mapOf("a","a","b","b"), 9223372036854775807L, 2147483647, 65535, 255, 9223372036854775807L, -2147483648, -32768, -128)
         .build()
         .run();
   }
@@ -253,23 +253,23 @@ public void testComplexLogicalIntTypes2() throws Exception {
         " cp.`store/parquet/complex/parquet_logical_types_complex.parquet` t " +
         " order by t.rowKey ";
     String[] columns = {
-        "rowKey " ,
-        "_UTF8" ,
-        "_Enum" ,
-        "_INT32_RAW" ,
-        "_INT_8" ,
-        "_INT_16" ,
-        "_INT_32" ,
-        "_UINT_8" ,
-        "_UINT_16" ,
-        "_UINT_32" ,
-        "_INT64_RAW" ,
-        "_INT_64" ,
-        "_UINT_64" ,
-        "_DATE_int32" ,
-        "_TIME_MILLIS_int32" ,
-        "_TIMESTAMP_MILLIS_int64" ,
-        "_INTERVAL_fixed_len_byte_array_12" ,
+        "rowKey ",
+        "_UTF8",
+        "_Enum",
+        "_INT32_RAW",
+        "_INT_8",
+        "_INT_16",
+        "_INT_32",
+        "_UINT_8",
+        "_UINT_16",
+        "_UINT_32",
+        "_INT64_RAW",
+        "_INT_64",
+        "_UINT_64",
+        "_DATE_int32",
+        "_TIME_MILLIS_int32",
+        "_TIMESTAMP_MILLIS_int64",
+        "_INTERVAL_fixed_len_byte_array_12",
         "_INT96_RAW"
 
     };
@@ -327,23 +327,23 @@ public void testComplexLogicalIntTypes3() throws Exception {
             " cp.`store/parquet/complex/parquet_logical_types_complex_nullable.parquet` t " +
             " order by t.rowKey ";
     String[] columns = {
-        "rowKey " ,
-        "_UTF8" ,
-        "_Enum" ,
-        "_INT32_RAW" ,
-        "_INT_8" ,
-        "_INT_16" ,
-        "_INT_32" ,
-        "_UINT_8" ,
-        "_UINT_16" ,
-        "_UINT_32" ,
-        "_INT64_RAW" ,
-        "_INT_64" ,
-        "_UINT_64" ,
-        "_DATE_int32" ,
-        "_TIME_MILLIS_int32" ,
-        "_TIMESTAMP_MILLIS_int64" ,
-        "_INTERVAL_fixed_len_byte_array_12" ,
+        "rowKey ",
+        "_UTF8",
+        "_Enum",
+        "_INT32_RAW",
+        "_INT_8",
+        "_INT_16",
+        "_INT_32",
+        "_UINT_8",
+        "_UINT_16",
+        "_UINT_32",
+        "_INT64_RAW",
+        "_INT_64",
+        "_UINT_64",
+        "_DATE_int32",
+        "_TIME_MILLIS_int32",
+        "_TIMESTAMP_MILLIS_int64",
+        "_INTERVAL_fixed_len_byte_array_12",
         "_INT96_RAW"
 
     };
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java
index 9db8398bb03..68c1fceb223 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetPhysicalPlan.java
@@ -54,7 +54,8 @@ public void testParseParquetPhysicalPlan() throws Exception {
     RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
     DrillConfig config = DrillConfig.create();
 
-    try (Drillbit bit1 = new Drillbit(config, serviceSet); DrillClient client = new DrillClient(config, serviceSet.getCoordinator())) {
+    try (Drillbit bit1 = new Drillbit(config, serviceSet);
+         DrillClient client = new DrillClient(config, serviceSet.getCoordinator())) {
       bit1.run();
       client.connect();
       List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL, Resources.toString(Resources.getResource(fileName),Charsets.UTF_8));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
index bb242ba2e32..941f50f3900 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
@@ -117,10 +117,10 @@ public void testLogicalIntTypes() throws Exception {
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns(columns)
-        .baselineValues( 0L                   , 0           , 0        , 0       , 0L                    , 0            , 0       ,0       )
-        .baselineValues( -1L                  , -1          , -1       , -1      , -1L                   , -1           , -1      , -1     )
-        .baselineValues( 1L                   , 1           , 1        , 1       , -9223372036854775808L , 1            , 1       , 1      )
-        .baselineValues( 9223372036854775807L , 2147483647  , 65535    , 255     , 9223372036854775807L  , -2147483648  , -32768  , -128   )
+        .baselineValues(0L, 0, 0, 0, 0L, 0, 0, 0)
+        .baselineValues(-1L, -1, -1, -1, -1L, -1, -1, -1)
+        .baselineValues(1L, 1, 1, 1, -9223372036854775808L, 1, 1, 1)
+        .baselineValues(9223372036854775807L, 2147483647, 65535, 255, 9223372036854775807L, -2147483648, -32768, -128)
         .build()
         .run();
   }
@@ -128,7 +128,8 @@ public void testLogicalIntTypes() throws Exception {
   @Test //DRILL-5971
   public void testLogicalIntTypes2() throws Exception {
     byte[] bytes12 = {'1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b' };
-    byte[] bytesOnes = new byte[12]; Arrays.fill(bytesOnes, (byte)1);
+    byte[] bytesOnes = new byte[12];
+    Arrays.fill(bytesOnes, (byte)1);
     byte[] bytesZeros = new byte[12];
     String query = String.format(
         " select " +
@@ -155,23 +156,23 @@ public void testLogicalIntTypes2() throws Exception {
             " order by t.rowKey "
     );
     String[] columns = {
-        "rowKey " ,
-        "_UTF8" ,
-        "_Enum" ,
-        "_INT32_RAW" ,
-        "_INT_8" ,
-        "_INT_16" ,
-        "_INT_32" ,
-        "_UINT_8" ,
-        "_UINT_16" ,
-        "_UINT_32" ,
-        "_INT64_RAW" ,
-        "_INT_64" ,
-        "_UINT_64" ,
-        "_DATE_int32" ,
-        "_TIME_MILLIS_int32" ,
-        "_TIMESTAMP_MILLIS_int64" ,
-        "_INTERVAL_fixed_len_byte_array_12" ,
+        "rowKey ",
+        "_UTF8",
+        "_Enum",
+        "_INT32_RAW",
+        "_INT_8",
+        "_INT_16",
+        "_INT_32",
+        "_UINT_8",
+        "_UINT_16",
+        "_UINT_32",
+        "_INT64_RAW",
+        "_INT_64",
+        "_UINT_64",
+        "_DATE_int32",
+        "_TIME_MILLIS_int32",
+        "_TIMESTAMP_MILLIS_int64",
+        "_INTERVAL_fixed_len_byte_array_12",
         "_INT96_RAW"
 
     };
@@ -202,7 +203,8 @@ public void testLogicalIntTypes2() throws Exception {
   @Test //DRILL-5971
   public void testLogicalIntTypes3() throws Exception {
     byte[] bytes12 = {'1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b' };
-    byte[] bytesOnes = new byte[12]; Arrays.fill(bytesOnes, (byte)1);
+    byte[] bytesOnes = new byte[12];
+    Arrays.fill(bytesOnes, (byte)1);
     byte[] bytesZeros = new byte[12];
     String query = String.format(
         " select " +
@@ -229,23 +231,23 @@ public void testLogicalIntTypes3() throws Exception {
             " order by t.rowKey "
     );
     String[] columns = {
-        "rowKey " ,
-        "_UTF8" ,
-        "_Enum" ,
-        "_INT32_RAW" ,
-        "_INT_8" ,
-        "_INT_16" ,
-        "_INT_32" ,
-        "_UINT_8" ,
-        "_UINT_16" ,
-        "_UINT_32" ,
-        "_INT64_RAW" ,
-        "_INT_64" ,
-        "_UINT_64" ,
-        "_DATE_int32" ,
-        "_TIME_MILLIS_int32" ,
-        "_TIMESTAMP_MILLIS_int64" ,
-        "_INTERVAL_fixed_len_byte_array_12" ,
+        "rowKey ",
+        "_UTF8",
+        "_Enum",
+        "_INT32_RAW",
+        "_INT_8",
+        "_INT_16",
+        "_INT_32",
+        "_UINT_8",
+        "_UINT_16",
+        "_UINT_32",
+        "_INT64_RAW",
+        "_INT_64",
+        "_UINT_64",
+        "_DATE_int32",
+        "_TIME_MILLIS_int32",
+        "_TIMESTAMP_MILLIS_int64",
+        "_INTERVAL_fixed_len_byte_array_12",
         "_INT96_RAW"
 
     };
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/TestSplitAndTransfer.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/TestSplitAndTransfer.java
index 96dbd7caa2b..d8a9e03a918 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/TestSplitAndTransfer.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/TestSplitAndTransfer.java
@@ -170,9 +170,9 @@ public void testBitVectorImpl(int valueCount, final int[][] startLengths, TestBi
     bitVector.allocateNew(valueCount  + 8); // extra byte at the end that gets filled with junk
     final int[] compareArray = new int[valueCount];
 
-    int testBitValue = 0 ;
+    int testBitValue = 0;
     final BitVector.Mutator mutator = bitVector.getMutator();
-    for (int i = 0; i < valueCount; i ++) {
+    for (int i = 0; i < valueCount; i++) {
       testBitValue = getBit(pattern, i);
       mutator.set(i, testBitValue);
       compareArray[i] = testBitValue;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
index ccab7969d63..b20c7e24f08 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
@@ -591,7 +591,7 @@ public void drill_4479() throws Exception {
       table_dir.mkdir();
       BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "mostlynulls.json")));
       // Create an entire batch of null values for 3 columns
-      for (int i = 0 ; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
+      for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
         os.write("{\"a\": null, \"b\": null, \"c\": null}".getBytes());
       }
       // Add a row with {bigint,  float, string} values
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java b/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java
index e037d027113..d7028a51fc6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java
@@ -627,7 +627,7 @@ public void compareMergedOnHeapVectors() throws Exception {
 
       compareMergedVectors(expectedSuperVectors, actualSuperVectors);
     } catch (Exception e) {
-      throw new Exception(e.getMessage() + "\nFor query: " + query , e);
+      throw new Exception(e.getMessage() + "\nFor query: " + query, e);
     } finally {
       cleanupBatches(expected, actual);
     }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java
index 77ee6e96dff..6b69d2b59bd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java
@@ -134,7 +134,8 @@ public void secondTest() throws Exception {
 
       ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).configProperty(ExecConstants.SLICE_TARGET, 10);
 
-      try (ClusterFixture cluster = builder.build(); ClientFixture client = cluster.clientFixture()) {
+      try (ClusterFixture cluster = builder.build();
+           ClientFixture client = cluster.clientFixture()) {
         String sql = "SELECT * FROM `dfs`.`test/employee.json`";
         logger.info(client.queryBuilder().sql(sql).explainJson());
         QuerySummary results = client.queryBuilder().sql(sql).run();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/QueryRowSetIterator.java b/exec/java-exec/src/test/java/org/apache/drill/test/QueryRowSetIterator.java
index c1b925313eb..addd68baa95 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/QueryRowSetIterator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/QueryRowSetIterator.java
@@ -52,7 +52,7 @@
 
   @Override
   public boolean hasNext() {
-    for ( ; ; ) {
+    for (;;) {
       QueryEvent event = listener.get();
       state = event.state;
       batch = null;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java
index 5060e0aa6f9..7b00cb48ea1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java
@@ -334,7 +334,7 @@ public TestBuilder expectsNumRecords(int expectedNumRecords) {
    * @param baselineValues - the baseline values to validate
    * @return the test builder
    */
-  public TestBuilder baselineValues(Object ... baselineValues) {
+  public TestBuilder baselineValues(Object... baselineValues) {
     assert getExpectedSchema() == null : "The expected schema is not needed when baselineValues are provided ";
     if (ordered == null) {
       throw new RuntimeException("Ordering not set, before specifying baseline data you must explicitly call the ordered() or unOrdered() method on the " + this.getClass().getSimpleName());
@@ -410,7 +410,7 @@ public BaselineQueryTestBuilder sqlBaselineQuery(Object baselineQuery) {
         baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
   }
 
-  public BaselineQueryTestBuilder sqlBaselineQuery(String query, String ...replacements) {
+  public BaselineQueryTestBuilder sqlBaselineQuery(String query, String... replacements) {
     return sqlBaselineQuery(String.format(query, (Object[]) replacements));
   }
 
@@ -442,7 +442,7 @@ private String getDecimalPrecisionScaleInfo(TypeProtos.MajorType type) {
         precision = String.format("(%d,%d)", type.getPrecision(), type.getScale());
         break;
       default:
-        ; // do nothing empty string set above
+        // do nothing empty string set above
     }
     return precision;
   }
@@ -471,7 +471,7 @@ public CSVTestBuilder baselineTypes(TypeProtos.MajorType... baselineTypes) {
     }
 
     // convenience method to convert minor types to major types if no decimals with precisions are needed
-    public CSVTestBuilder baselineTypes(TypeProtos.MinorType ... baselineTypes) {
+    public CSVTestBuilder baselineTypes(TypeProtos.MinorType... baselineTypes) {
       TypeProtos.MajorType[] majorTypes = new TypeProtos.MajorType[baselineTypes.length];
       int i = 0;
       for(TypeProtos.MinorType minorType : baselineTypes) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/TestGracefulShutdown.java b/exec/java-exec/src/test/java/org/apache/drill/test/TestGracefulShutdown.java
index bec1691078f..9e3721cf029 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/TestGracefulShutdown.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/TestGracefulShutdown.java
@@ -71,7 +71,7 @@ private static void enableDrillPortHunting(ClusterFixtureBuilder builder) {
   @Test
   public void testOnlineEndPoints() throws  Exception {
 
-    String[] drillbits = {"db1" ,"db2","db3"};
+    String[] drillbits = {"db1", "db2", "db3"};
     ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher).withLocalZk().withBits(drillbits);
     enableDrillPortHunting(builder);
 
@@ -142,7 +142,7 @@ public void testRestApi() throws Exception {
   @Test
   public void testRestApiShutdown() throws Exception {
 
-    String[] drillbits = {"db1" ,"db2", "db3"};
+    String[] drillbits = {"db1", "db2", "db3"};
     ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher).withLocalZk().withBits(drillbits);
     enableWebServer(builder);
     QueryBuilder.QuerySummaryFuture listener;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFixedWidthWriter.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFixedWidthWriter.java
index adaeb33e229..8177d9f83a3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFixedWidthWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFixedWidthWriter.java
@@ -401,7 +401,7 @@ public boolean canExpand(ScalarWriter writer, int delta) {
       });
       writer.startWrite();
       try {
-        for (int i = 0; ; i++ ) {
+        for (int i = 0;; i++ ) {
           index.index = i;
           writer.startRow();
           writer.setInt(i);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestOffsetVectorWriter.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestOffsetVectorWriter.java
index 78257e9abff..f9b835f6b52 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestOffsetVectorWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestOffsetVectorWriter.java
@@ -384,7 +384,7 @@ public boolean canExpand(ScalarWriter writer, int delta) {
       });
       writer.startWrite();
       try {
-        for (int i = 0; ; i++ ) {
+        for (int i = 0;; i++ ) {
           index.index = i;
           writer.startRow();
           writer.setNextOffset(i);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariableWidthWriter.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariableWidthWriter.java
index 5e5e0298f1d..2b93b425734 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariableWidthWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariableWidthWriter.java
@@ -382,7 +382,7 @@ public boolean canExpand(ScalarWriter writer, int delta) {
       byte value[] = new byte[423];
       Arrays.fill(value, (byte) 'X');
       try {
-        for (int i = 0; ; i++ ) {
+        for (int i = 0;; i++ ) {
           index.index = i;
           writer.startRow();
           writer.setBytes(value, value.length);
diff --git a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
index f73a1b9ebb8..6319cb32e4d 100644
--- a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
+++ b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
@@ -175,7 +175,8 @@ public void executeJdbcAllQuery() throws Exception {
   private static void printQuery(Connection c, String query) throws SQLException {
     final StringBuilder sb = new StringBuilder();
 
-    try (Statement s = c.createStatement(); ResultSet result = s.executeQuery(query)) {
+    try (Statement s = c.createStatement();
+         ResultSet result = s.executeQuery(query)) {
       while (result.next()) {
         final int columnCount = result.getMetaData().getColumnCount();
         for(int i = 1; i < columnCount+1; i++){
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillColumnMetaDataList.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillColumnMetaDataList.java
index 90dcafe06ce..27132f655bd 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillColumnMetaDataList.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillColumnMetaDataList.java
@@ -128,10 +128,16 @@ public void updateColumnMetaData(String catalogName, String schemaName,
 
       final int nullability;
       switch ( field.getDataMode() ) {
-        case OPTIONAL: nullability = ResultSetMetaData.columnNullable; break;
-        case REQUIRED: nullability = ResultSetMetaData.columnNoNulls;  break;
+        case OPTIONAL:
+          nullability = ResultSetMetaData.columnNullable;
+          break;
+        case REQUIRED:
+          nullability = ResultSetMetaData.columnNoNulls;
+          break;
         // Should REPEATED still map to columnNoNulls? or to columnNullable?
-        case REPEATED: nullability = ResultSetMetaData.columnNoNulls;  break;
+        case REPEATED:
+          nullability = ResultSetMetaData.columnNoNulls;
+          break;
         default:
           throw new AssertionError( "Unexpected new DataMode value '"
                                     + field.getDataMode().name() + "'" );
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/proxy/ProxiesManager.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/proxy/ProxiesManager.java
index c014ebf8fb2..3d0bd941ab0 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/proxy/ProxiesManager.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/proxy/ProxiesManager.java
@@ -107,8 +107,7 @@ public ProxiesManager( final InvocationReporter reporter ) {
       catch ( InstantiationException | IllegalAccessException
               | IllegalArgumentException | InvocationTargetException
               | NoSuchMethodException | SecurityException e ) {
-        throw new RuntimeException(
-            "Error creating proxy for " + declaredType + ": " + e , e );
+        throw new RuntimeException("Error creating proxy for " + declaredType + ": " + e, e);
       }
     }
     return proxyInstance;
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/PreparedStatementTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/PreparedStatementTest.java
index 94155e4a385..6d2d195b1df 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/PreparedStatementTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/PreparedStatementTest.java
@@ -258,7 +258,7 @@ public String toString() {
   public void testDefaultGetQueryTimeout() throws SQLException {
     try (PreparedStatement stmt = connection.prepareStatement(SYS_VERSION_SQL)) {
       int timeoutValue = stmt.getQueryTimeout();
-      assertEquals( 0L , timeoutValue );
+      assertEquals(0L, timeoutValue);
     }
   }
 
@@ -288,7 +288,7 @@ public void testValidSetQueryTimeout() throws SQLException {
       int valueToSet = new Random(20150304).nextInt(59)+1;
       logger.info("Setting timeout as {} seconds", valueToSet);
       stmt.setQueryTimeout(valueToSet);
-      assertEquals( valueToSet , stmt.getQueryTimeout() );
+      assertEquals(valueToSet, stmt.getQueryTimeout());
     }
   }
 
@@ -306,7 +306,7 @@ public void testSetQueryTimeoutAsZero() throws SQLException {
         rs.getBytes(1);
         rowCount++;
       }
-      assertEquals( 3 , rowCount );
+      assertEquals(3, rowCount);
     }
   }
 
@@ -406,7 +406,7 @@ public void testNonTriggeredQueryTimeout() throws SQLException {
         rs.getBytes(1);
         rowCount++;
       }
-      assertEquals( 1 , rowCount );
+      assertEquals(1, rowCount);
     }
   }
 
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/StatementTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/StatementTest.java
index 9f4b9adc20c..73757f4013a 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/StatementTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/StatementTest.java
@@ -83,7 +83,7 @@ public static void tearDownStatement() throws SQLException {
   public void testDefaultGetQueryTimeout() throws SQLException {
     try(Statement stmt = connection.createStatement()) {
       int timeoutValue = stmt.getQueryTimeout();
-      assertEquals( 0 , timeoutValue );
+      assertEquals(0, timeoutValue);
     }
   }
 
@@ -116,7 +116,7 @@ public void testValidSetQueryTimeout() throws SQLException {
       int valueToSet = new Random(20150304).nextInt(59)+1;
       logger.info("Setting timeout as {} seconds", valueToSet);
       stmt.setQueryTimeout(valueToSet);
-      assertEquals( valueToSet , stmt.getQueryTimeout() );
+      assertEquals( valueToSet, stmt.getQueryTimeout() );
     }
   }
 
@@ -135,7 +135,7 @@ public void testSetQueryTimeoutAsZero() throws SQLException {
         rs.getBytes(1);
         rowCount++;
       }
-      assertEquals( 3 , rowCount );
+      assertEquals( 3, rowCount );
     }
   }
 
@@ -235,7 +235,7 @@ public void testNonTriggeredQueryTimeout() throws SQLException {
         rs.getBytes(1);
         rowCount++;
       }
-      assertEquals( 1 , rowCount );
+      assertEquals( 1, rowCount );
     }
   }
 
diff --git a/exec/memory/base/src/main/java/org/apache/drill/exec/memory/BaseAllocator.java b/exec/memory/base/src/main/java/org/apache/drill/exec/memory/BaseAllocator.java
index eb51dc3307b..893024a3b7a 100644
--- a/exec/memory/base/src/main/java/org/apache/drill/exec/memory/BaseAllocator.java
+++ b/exec/memory/base/src/main/java/org/apache/drill/exec/memory/BaseAllocator.java
@@ -788,8 +788,7 @@ public static StringBuilder indent(StringBuilder sb, int indent) {
   public static enum Verbosity {
     BASIC(false, false), // only include basic information
     LOG(true, false), // include basic
-    LOG_WITH_STACKTRACE(true, true) //
-    ;
+    LOG_WITH_STACKTRACE(true, true);
 
     public final boolean includeHistoricalLog;
     public final boolean includeStackTraces;
diff --git a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
index deed7a71f22..fd2755899dc 100644
--- a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
+++ b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
@@ -135,7 +135,6 @@ void send(RpcOutcomeListener<RECEIVE> listener, C connection, T rpcType, SEND pr
 
         }
       }
-      ;
     }
   }
 
diff --git a/logical/src/main/java/org/apache/drill/common/expression/BooleanOperator.java b/logical/src/main/java/org/apache/drill/common/expression/BooleanOperator.java
index b107bac052a..831821e881f 100644
--- a/logical/src/main/java/org/apache/drill/common/expression/BooleanOperator.java
+++ b/logical/src/main/java/org/apache/drill/common/expression/BooleanOperator.java
@@ -59,7 +59,7 @@ public int getCumulativeCost() {
       i++;
     }
 
-    return (int) (cost / i) ;
+    return cost / i;
   }
 
 }
diff --git a/logical/src/main/java/org/apache/drill/common/expression/IfExpression.java b/logical/src/main/java/org/apache/drill/common/expression/IfExpression.java
index 147129b9537..6a3dbe5010f 100644
--- a/logical/src/main/java/org/apache/drill/common/expression/IfExpression.java
+++ b/logical/src/main/java/org/apache/drill/common/expression/IfExpression.java
@@ -153,7 +153,7 @@ public int getCumulativeCost() {
       i++;
     }
 
-    return (int) (cost / i) ;
+    return cost / i;
   }
 
 }
diff --git a/logical/src/main/java/org/apache/drill/common/expression/NullExpression.java b/logical/src/main/java/org/apache/drill/common/expression/NullExpression.java
index a3465042caf..b36f3e669aa 100644
--- a/logical/src/main/java/org/apache/drill/common/expression/NullExpression.java
+++ b/logical/src/main/java/org/apache/drill/common/expression/NullExpression.java
@@ -53,7 +53,7 @@ public ExpressionPosition getPosition() {
     return Iterators.emptyIterator();
   }
 
-  public int getSelfCost() { return 0 ; }
+  public int getSelfCost() { return 0; }
 
   public int getCumulativeCost() { return 0; }
 
diff --git a/pom.xml b/pom.xml
index a68c0b6e195..2c6fc29a7d8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -397,7 +397,7 @@
           <dependency>
             <groupId>com.puppycrawl.tools</groupId>
             <artifactId>checkstyle</artifactId>
-            <version>5.9</version>
+            <version>7.8.2</version>
           </dependency>
         </dependencies>
         <configuration>
diff --git a/src/main/resources/checkstyle-config.xml b/src/main/resources/checkstyle-config.xml
index 35628d352e8..1ee487845dc 100644
--- a/src/main/resources/checkstyle-config.xml
+++ b/src/main/resources/checkstyle-config.xml
@@ -36,6 +36,10 @@
     <module name="AvoidStarImport"/>
     <module name="NeedBraces"/>
     <module name="PackageDeclaration"/>
+    <module name="EmptyStatement"/>
+    <module name="NoWhitespaceBefore"/>
+    <module name="OneStatementPerLine"/>
+
   </module>
 
   <module name="FileTabCharacter"/>


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services