Posted to commits@druid.apache.org by ab...@apache.org on 2023/02/15 09:53:03 UTC

[druid] branch master updated: Code cleanup & message improvements (#13778)

This is an automated email from the ASF dual-hosted git repository.

abhishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new 333196d207 Code cleanup & message improvements (#13778)
333196d207 is described below

commit 333196d20717c9109665f717731c333939af4e7a
Author: Paul Rogers <pa...@users.noreply.github.com>
AuthorDate: Wed Feb 15 01:52:54 2023 -0800

    Code cleanup & message improvements (#13778)
    
    * Misc cleanup edits
    
    Correct spacing
    Add type parameters
    Add toString() methods to formats so tests compare correctly
    IT doc revisions
    Error message edits
    Display UT query results when tests fail
    
    * Edit
    
    * Build fix
    
    * Build fixes
---
 .../data/input/impl/CloudObjectInputSource.java    |   2 +-
 .../druid/data/input/impl/CsvInputFormat.java      |   6 ++
 .../data/input/impl/DelimitedInputFormat.java      |   6 ++
 .../druid/data/input/impl/FlatTextInputFormat.java |  12 +++
 .../java/util/common/granularity/Granularity.java  |   2 -
 .../common/granularity/IntervalsByGranularity.java |   2 +-
 .../util/common/granularity/PeriodGranularity.java |   1 -
 .../apache/druid/segment/column/ColumnType.java    |   2 +-
 .../org/apache/druid/segment/column/ValueType.java |   1 -
 .../druid/indexing/common/task/CompactionTask.java |  77 +++++++++-------
 integration-tests-ex/README.md                     |  24 +----
 .../druid/testsEx/config/ClusterConfigTest.java    |   6 +-
 .../AbstractCloudInputSourceParallelIndexTest.java |   4 +-
 .../AbstractGcsInputSourceParallelIndexTest.java   |   5 +-
 .../testsEx/indexer/AbstractITBatchIndexTest.java  |   2 +-
 .../AbstractS3InputSourceParallelIndexTest.java    |   5 +-
 .../indexer/ITAzureToAzureParallelIndexTest.java   |   2 +-
 .../indexer/ITGcsToGcsParallelIndexTest.java       |   2 +-
 .../ITLocalInputSourceAllFormatSchemalessTest.java |   5 +-
 .../ITLocalInputSourceAllInputFormatTest.java      |   5 +-
 .../testsEx/indexer/ITS3ToS3ParallelIndexTest.java |   2 +-
 .../testsEx/msq/ITAzureSQLBasedIngestionTest.java  |   2 +-
 .../testsEx/msq/ITGcsSQLBasedIngestionTest.java    |   2 +-
 .../msq/ITKeyStatisticsSketchMergeMode.java        |  13 ---
 .../apache/druid/testsEx/msq/ITMSQReindexTest.java |  14 +--
 .../msq/ITMultiStageQueryWorkerFaultTolerance.java |  12 ---
 .../testsEx/msq/ITS3SQLBasedIngestionTest.java     |   2 +-
 .../clients/CoordinatorResourceTestClient.java     |  77 ++++++++++++----
 it.sh                                              |   3 +-
 .../org/apache/druid/guice/ExtensionsLoader.java   |   4 +-
 .../java/org/apache/druid/segment/TestHelper.java  |   2 -
 .../apache/druid/guice/DruidInjectorBuilder.java   |   2 +-
 .../indexing/granularity/GranularitySpec.java      |   2 -
 .../granularity/UniformGranularitySpec.java        |   1 -
 .../org/apache/druid/server/RequestLogLine.java    |  13 ++-
 .../druid/server/http/SelfDiscoveryResource.java   |   5 +-
 .../server/log/FilteredRequestLoggerProvider.java  |   4 +-
 .../druid/server/security/Authenticator.java       |   3 +-
 .../PreResponseAuthorizationCheckFilter.java       |  31 ++++---
 .../server/log/DefaultRequestLogEventTest.java     |   2 +-
 .../server/log/FilteredRequestLoggerTest.java      | 102 +++++++++++----------
 .../druid/server/log/LoggingRequestLoggerTest.java |  13 ++-
 .../druid/server/router/QueryHostFinder.java       |   8 +-
 .../sql/calcite/planner/CalciteRulesManager.java   |  12 +--
 .../sql/calcite/planner/DruidOperatorTable.java    |   6 +-
 .../sql/calcite/schema/DruidSchemaCatalog.java     |  13 +--
 .../sql/calcite/schema/RootSchemaProvider.java     |  21 +++--
 .../druid/sql/calcite/BaseCalciteQueryTest.java    |  59 ++++++++++--
 48 files changed, 342 insertions(+), 259 deletions(-)

diff --git a/core/src/main/java/org/apache/druid/data/input/impl/CloudObjectInputSource.java b/core/src/main/java/org/apache/druid/data/input/impl/CloudObjectInputSource.java
index 3c3e3c6b72..81e7f5fd63 100644
--- a/core/src/main/java/org/apache/druid/data/input/impl/CloudObjectInputSource.java
+++ b/core/src/main/java/org/apache/druid/data/input/impl/CloudObjectInputSource.java
@@ -238,7 +238,7 @@ public abstract class CloudObjectInputSource extends AbstractInputSource
   private void throwIfIllegalArgs(boolean clause) throws IllegalArgumentException
   {
     if (clause) {
-      throw new IllegalArgumentException("exactly one of either uris or prefixes or objects must be specified");
+      throw new IllegalArgumentException("Exactly one of uris, prefixes or objects must be specified");
     }
   }
 }
diff --git a/core/src/main/java/org/apache/druid/data/input/impl/CsvInputFormat.java b/core/src/main/java/org/apache/druid/data/input/impl/CsvInputFormat.java
index ec4c0084ff..03706d1ef4 100644
--- a/core/src/main/java/org/apache/druid/data/input/impl/CsvInputFormat.java
+++ b/core/src/main/java/org/apache/druid/data/input/impl/CsvInputFormat.java
@@ -90,4 +90,10 @@ public class CsvInputFormat extends FlatTextInputFormat
                                        .withSeparator(SEPARATOR)
                                        .build();
   }
+
+  @Override
+  public String toString()
+  {
+    return "CsvInputFormat{" + fieldsToString() + "}";
+  }
 }
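
The toString() overrides added above and in the sibling formats exist so that test failures are readable: JUnit's assertEquals() compares operands with equals(), but renders both sides with toString() when the assertion fails, so a format without an override reports only an object hash. A minimal standalone sketch of the effect (hypothetical Format class, not from this commit):

    class Format
    {
      private final String delimiter;

      Format(String delimiter)
      {
        this.delimiter = delimiter;
      }

      @Override
      public boolean equals(Object o)
      {
        return o instanceof Format && ((Format) o).delimiter.equals(delimiter);
      }

      @Override
      public int hashCode()
      {
        return delimiter.hashCode();
      }

      // Without this override, a failed assertEquals() prints only
      // something like "Format@1b6d3586", hiding the differing field.
      @Override
      public String toString()
      {
        return "Format{delimiter=\"" + delimiter + "\"}";
      }
    }

A failing org.junit.Assert.assertEquals(new Format(","), new Format("\t")) then reports both delimiter values instead of two opaque hashes.
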
diff --git a/core/src/main/java/org/apache/druid/data/input/impl/DelimitedInputFormat.java b/core/src/main/java/org/apache/druid/data/input/impl/DelimitedInputFormat.java
index 599bd2e634..f1a0a48522 100644
--- a/core/src/main/java/org/apache/druid/data/input/impl/DelimitedInputFormat.java
+++ b/core/src/main/java/org/apache/druid/data/input/impl/DelimitedInputFormat.java
@@ -107,4 +107,10 @@ public class DelimitedInputFormat extends FlatTextInputFormat
 
     return Collections.unmodifiableList(result);
   }
+
+  @Override
+  public String toString()
+  {
+    return "DelimitedInputFormat{" + fieldsToString() + "}";
+  }
 }
diff --git a/core/src/main/java/org/apache/druid/data/input/impl/FlatTextInputFormat.java b/core/src/main/java/org/apache/druid/data/input/impl/FlatTextInputFormat.java
index fe877c1f44..bcd9eee5bd 100644
--- a/core/src/main/java/org/apache/druid/data/input/impl/FlatTextInputFormat.java
+++ b/core/src/main/java/org/apache/druid/data/input/impl/FlatTextInputFormat.java
@@ -143,4 +143,16 @@ public abstract class FlatTextInputFormat implements InputFormat
   {
     return Objects.hash(listDelimiter, columns, findColumnsFromHeader, skipHeaderRows, delimiter);
   }
+
+  protected String fieldsToString()
+  {
+    return "FlatTextInputFormat{"
+        + "delimiter=\"" + delimiter + "\""
+        + ", listDelimiter="
+        + (listDelimiter == null ? "null" : "\"" + listDelimiter + "\"")
+        + ", findColumnsFromHeader=" + findColumnsFromHeader
+        + ", skipHeaderRows=" + skipHeaderRows
+        + ", columns=" + columns
+        + "}";
+  }
 }
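
The parentheses around the listDelimiter conditional matter: in Java, + binds more tightly than ==, so without them the whole concatenated prefix is compared against null instead of the field. A compilable sketch of the pitfall (hypothetical demo class):

    public class PrecedenceDemo
    {
      public static void main(String[] args)
      {
        String listDelimiter = null;

        // Concatenation binds first, so this compares the concatenated
        // string (never null) to null and always takes the else branch:
        String broken = "listDelimiter=" + listDelimiter == null ? "<null>" : "<set>";

        // Parenthesizing restores the intended per-field null check:
        String fixed = "listDelimiter=" + (listDelimiter == null ? "<null>" : "<set>");

        System.out.println(broken); // prints: <set>
        System.out.println(fixed);  // prints: listDelimiter=<null>
      }
    }
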
diff --git a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java
index 3d07d0c582..ba48a998b9 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java
@@ -41,7 +41,6 @@ import java.util.regex.Pattern;
 
 public abstract class Granularity implements Cacheable
 {
-
   public static Comparator<Granularity> IS_FINER_THAN = new Comparator<Granularity>()
   {
     @Override
@@ -236,7 +235,6 @@ public abstract class Granularity implements Cacheable
     {
       return new IntervalIterator(inputInterval);
     }
-
   }
 
   private class IntervalIterator implements Iterator<Interval>
diff --git a/core/src/main/java/org/apache/druid/java/util/common/granularity/IntervalsByGranularity.java b/core/src/main/java/org/apache/druid/java/util/common/granularity/IntervalsByGranularity.java
index ff076d4675..ea742a4ec8 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/granularity/IntervalsByGranularity.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/granularity/IntervalsByGranularity.java
@@ -60,7 +60,7 @@ public class IntervalsByGranularity
     if (sortedNonOverlappingIntervals.isEmpty()) {
       return Collections.emptyIterator();
     } else {
-      // The filter after transform & concat is to remove duplicats.
+      // The filter after transform & concat is to remove duplicates.
       // This can happen when condense left intervals that did not overlap but
       // when a larger granularity is applied then they become equal
       // imagine input are 2013-01-01T00Z/2013-01-10T00Z, 2013-01-15T00Z/2013-01-20T00Z.
diff --git a/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java b/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
index b649b9c08f..09960e33cd 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/granularity/PeriodGranularity.java
@@ -192,7 +192,6 @@ public class PeriodGranularity extends Granularity implements JsonSerializable
       return false;
     }
     return chronology.equals(that.chronology);
-
   }
 
   @Override
diff --git a/core/src/main/java/org/apache/druid/segment/column/ColumnType.java b/core/src/main/java/org/apache/druid/segment/column/ColumnType.java
index 16673cc62c..1f80ccf9ad 100644
--- a/core/src/main/java/org/apache/druid/segment/column/ColumnType.java
+++ b/core/src/main/java/org/apache/druid/segment/column/ColumnType.java
@@ -45,7 +45,7 @@ public class ColumnType extends BaseTypeSignature<ValueType>
   public static final ColumnType STRING = new ColumnType(ValueType.STRING, null, null);
 
   /**
-   * Druid 64-bit integer number primitve type. Values will be represented as Java long or {@link Long}.
+   * Druid 64-bit integer number primitive type. Values will be represented as Java long or {@link Long}.
    *
    * @see ValueType#LONG
    */
diff --git a/core/src/main/java/org/apache/druid/segment/column/ValueType.java b/core/src/main/java/org/apache/druid/segment/column/ValueType.java
index 5cb65423e8..1ffeb5d1ef 100644
--- a/core/src/main/java/org/apache/druid/segment/column/ValueType.java
+++ b/core/src/main/java/org/apache/druid/segment/column/ValueType.java
@@ -98,7 +98,6 @@ public enum ValueType implements TypeDescriptor
    */
   ARRAY;
 
-
   /**
    * Type is a numeric type, not including numeric array types
    */
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java
index da36ff618b..9dc559657f 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java
@@ -821,40 +821,51 @@ public class CompactionTask extends AbstractBatchIndexTask
   {
     return Iterables.transform(
         Iterables.filter(dataSegments, dataSegment -> !dataSegment.isTombstone()),
-        dataSegment ->
-            Pair.of(
-                dataSegment,
-                () -> {
-                  try {
-                    final Closer closer = Closer.create();
-                    final File file = segmentCacheManager.getSegmentFiles(dataSegment);
-                    closer.register(() -> segmentCacheManager.cleanup(dataSegment));
-                    final QueryableIndex queryableIndex = closer.register(indexIO.loadIndex(file));
-                    return new ResourceHolder<QueryableIndex>()
-                    {
-                      @Override
-                      public QueryableIndex get()
-                      {
-                        return queryableIndex;
-                      }
-
-                      @Override
-                      public void close()
-                      {
-                        try {
-                          closer.close();
-                        }
-                        catch (IOException e) {
-                          throw new RuntimeException(e);
-                        }
-                      }
-                    };
-                  }
-                  catch (Exception e) {
-                    throw new RuntimeException(e);
-                  }
+        dataSegment -> fetchSegment(dataSegment, segmentCacheManager, indexIO)
+    );
+  }
+
+  // Broken out into a separate function because some tools can't infer the
+  // pair type, but if the type is given explicitly, IntelliJ inspections raise
+  // an error. Creating a function keeps everyone happy.
+  private static Pair<DataSegment, Supplier<ResourceHolder<QueryableIndex>>> fetchSegment(
+      DataSegment dataSegment,
+      SegmentCacheManager segmentCacheManager,
+      IndexIO indexIO
+  )
+  {
+    return Pair.of(
+        dataSegment,
+        () -> {
+          try {
+            final Closer closer = Closer.create();
+            final File file = segmentCacheManager.getSegmentFiles(dataSegment);
+            closer.register(() -> segmentCacheManager.cleanup(dataSegment));
+            final QueryableIndex queryableIndex = closer.register(indexIO.loadIndex(file));
+            return new ResourceHolder<QueryableIndex>()
+            {
+              @Override
+              public QueryableIndex get()
+              {
+                return queryableIndex;
+              }
+
+              @Override
+              public void close()
+              {
+                try {
+                  closer.close();
                 }
-            )
+                catch (IOException e) {
+                  throw new RuntimeException(e);
+                }
+              }
+            };
+          }
+          catch (Exception e) {
+            throw new RuntimeException(e);
+          }
+        }
     );
   }
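
The extraction into fetchSegment() works because a method's declared return type supplies the lambda's target type. A compilable sketch of the inference issue the comment describes (hypothetical Pair class standing in for org.apache.druid.java.util.common.Pair):

    import java.util.function.Supplier;

    class Pair<A, B>
    {
      final A lhs;
      final B rhs;

      Pair(A lhs, B rhs)
      {
        this.lhs = lhs;
        this.rhs = rhs;
      }

      static <A, B> Pair<A, B> of(A lhs, B rhs)
      {
        return new Pair<>(lhs, rhs);
      }
    }

    class InferenceDemo
    {
      static Pair<String, Supplier<Integer>> viaHelper(String key)
      {
        // The declared return type pins the lambda's target type
        // (Supplier<Integer>) with no inline type witness, so neither
        // the compiler nor IDE inspections have anything to flag.
        return Pair.of(key, () -> 42);
      }
    }
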
 
diff --git a/integration-tests-ex/README.md b/integration-tests-ex/README.md
index 9eb9fb2c8c..b74456f747 100644
--- a/integration-tests-ex/README.md
+++ b/integration-tests-ex/README.md
@@ -32,46 +32,30 @@ an explanation.
 
 ### Build Druid
 
-To make the text a bit simpler, define a variable for the standard settings:
-
-```bash
-export MAVEN_IGNORE=-P skip-static-checks,skip-tests -Dmaven.javadoc.skip=true
-
 ```bash
-mvn clean package -P dist $MAVEN_IGNORE -T1.0C
+./it.sh build
 ```
 
 ### Build the Test Image
 
 ```bash
-cd $DRUID_DEV/integration-tests-ex/image
-mvn install -P test-image $MAVEN_IGNORE
+./it.sh image
 ```
 
 ### Run an IT from the Command Line
 
 ```bash
-mvn verify -P IT-<category> -pl :druid-it-cases $MAVEN_IGNORE
+./it.sh test <category>
 ```
 
 Where `<category>` is one of the test categories.
 
-Or
-
-```bash
-cd $DRUID_DEV/integration-tests-ex/cases
-mvn verify -P skip-static-checks,docker-tests,IT-<category> \
-    -Dmaven.javadoc.skip=true -DskipUTs=true \
-    -pl :druid-it-cases
-```
-
 ### Run an IT from the IDE
 
 Start the cluster:
 
 ```bash
-cd $DRUID_DEV/integration-tests-ex/cases
-./cluster.sh up <category>
+./it.sh up <category>
 ```
 
 Where `<category>` is one of the test categories. Then launch the
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java
index 78eab57008..c607333119 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/config/ClusterConfigTest.java
@@ -93,8 +93,10 @@ public class ClusterConfigTest
     assertEquals("myRegion", props.get("druid.test.config.cloudRegion"));
     // System property
     assertEquals("sys", props.get("druid.test.config.sys_prop"));
-    // From user override
-    assertEquals("user", props.get("druid.test.config.user_var"));
+    // From user override. Uncomment to test. Requires the following
+    // file ~/druid-it/Test.env, with contents:
+    // druid_user_var=user
+    //assertEquals("user", props.get("druid.test.config.user_var"));
 
     // Test plumbing through the test config
     Properties properties = new Properties();
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractCloudInputSourceParallelIndexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractCloudInputSourceParallelIndexTest.java
index df39e43c64..51d8a20e2c 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractCloudInputSourceParallelIndexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractCloudInputSourceParallelIndexTest.java
@@ -130,7 +130,7 @@ public abstract class AbstractCloudInputSourceParallelIndexTest extends Abstract
    * @param inputSourceType     Input source type (eg : s3, gcs, azure)
    */
   void doTest(
-      Pair<String, List> inputSource,
+      Pair<String, List<?>> inputSource,
       Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair,
       String inputSourceType
   ) throws Exception
@@ -205,7 +205,7 @@ public abstract class AbstractCloudInputSourceParallelIndexTest extends Abstract
    *                            Should also contain expected results for those queries
    * @param inputSourceType     Input source type (eg : s3, gcs, azure)
    */
-  public void doMSQTest(Pair<String, List> inputSource,
+  public void doMSQTest(Pair<String, List<?>> inputSource,
                         String ingestSQLFilePath,
                         String testQueriesFilePath,
                         String inputSourceType
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractGcsInputSourceParallelIndexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractGcsInputSourceParallelIndexTest.java
index c9ab3e0780..1998183000 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractGcsInputSourceParallelIndexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractGcsInputSourceParallelIndexTest.java
@@ -23,10 +23,9 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testsEx.utils.GcsTestUtil;
 import org.junit.After;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 
-import static junit.framework.Assert.fail;
-
 /**
  * This class defines methods to upload and delete the data files used by the tests, which will inherit this class.
  * The files are uploaded based on the values set for following environment variables.
@@ -52,7 +51,7 @@ public class AbstractGcsInputSourceParallelIndexTest extends AbstractCloudInputS
     catch (Exception e) {
       LOG.error(e, "Unable to upload files to GCS");
       // Fail if exception
-      fail();
+      Assert.fail(e.getMessage());
     }
   }
 
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractITBatchIndexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractITBatchIndexTest.java
index 7b983b9cf1..2edb13a049 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractITBatchIndexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractITBatchIndexTest.java
@@ -196,7 +196,7 @@ public abstract class AbstractITBatchIndexTest extends AbstractIndexerTest
   }
 
   /**
-   * Sumits a sqlTask, waits for task completion.
+   * Submits a sqlTask, waits for task completion.
    */
   protected void submitMSQTaskFromFile(String sqlFilePath, String datasource, Map<String, Object> msqContext) throws Exception
   {
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractS3InputSourceParallelIndexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractS3InputSourceParallelIndexTest.java
index c92e7b226c..7bb6364367 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractS3InputSourceParallelIndexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/AbstractS3InputSourceParallelIndexTest.java
@@ -23,13 +23,12 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testsEx.utils.S3TestUtil;
 import org.junit.After;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import static junit.framework.Assert.fail;
-
 /**
  * This class defines methods to upload and delete the data files used by the tests, which will inherit this class.
  * The files are uploaded based on the values set for following environment variables.
@@ -56,7 +55,7 @@ public abstract class AbstractS3InputSourceParallelIndexTest extends AbstractClo
     catch (Exception e) {
       LOG.error(e, "Unable to upload files to s3");
       // Fail if exception
-      fail();
+      Assert.fail(e.getMessage());
     }
   }
 
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITAzureToAzureParallelIndexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITAzureToAzureParallelIndexTest.java
index 3c972350ab..d33fe77471 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITAzureToAzureParallelIndexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITAzureToAzureParallelIndexTest.java
@@ -45,7 +45,7 @@ public class ITAzureToAzureParallelIndexTest extends AbstractAzureInputSourcePar
 {
   @Test
   @Parameters(method = "resources")
-  public void testAzureIndexData(Pair<String, List> azureInputSource) throws Exception
+  public void testAzureIndexData(Pair<String, List<?>> azureInputSource) throws Exception
   {
     doTest(azureInputSource, new Pair<>(false, false), "azure");
   }
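
The signature changes in these test classes (Pair<String, List> becoming Pair<String, List<?>>) trade a raw type for a bounded wildcard: reads stay convenient while unchecked writes stop compiling. A small sketch of the difference, using hypothetical demo methods:

    import java.util.List;

    class RawTypeDemo
    {
      static void raw(List input)
      {
        input.add("anything"); // compiles, with only an unchecked warning
      }

      static void wildcard(List<?> input)
      {
        // input.add("anything"); // does not compile: nothing matches capture of ?
        Object first = input.isEmpty() ? null : input.get(0); // reads are still fine
      }
    }
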
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITGcsToGcsParallelIndexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITGcsToGcsParallelIndexTest.java
index d35a80c272..0eaef4e64c 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITGcsToGcsParallelIndexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITGcsToGcsParallelIndexTest.java
@@ -46,7 +46,7 @@ public class ITGcsToGcsParallelIndexTest extends AbstractGcsInputSourceParallelI
 {
   @Test
   @Parameters(method = "resources")
-  public void testGcsIndexData(Pair<String, List> gcsInputSource) throws Exception
+  public void testGcsIndexData(Pair<String, List<?>> gcsInputSource) throws Exception
   {
     doTest(gcsInputSource, new Pair<>(false, false), "google");
   }
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllFormatSchemalessTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllFormatSchemalessTest.java
index 61deca75ed..f0cf6eb3ca 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllFormatSchemalessTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllFormatSchemalessTest.java
@@ -42,7 +42,7 @@ public class ITLocalInputSourceAllFormatSchemalessTest extends AbstractLocalInpu
   @Test
   public void testAvroInputFormatIndexDataIngestionSpecWithFileSchemaSchemaless() throws Exception
   {
-    List fieldList = ImmutableList.of(
+    List<Object> fieldList = ImmutableList.of(
         ImmutableMap.of("name", "timestamp", "type", "string"),
         ImmutableMap.of("name", "page", "type", "string"),
         ImmutableMap.of("name", "language", "type", "string"),
@@ -60,7 +60,8 @@ public class ITLocalInputSourceAllFormatSchemalessTest extends AbstractLocalInpu
         ImmutableMap.of("name", "deleted", "type", "int"),
         ImmutableMap.of("name", "delta", "type", "int")
     );
-    Map schema = ImmutableMap.of("namespace", "org.apache.druid.data.input",
+    Map<String, Object> schema = ImmutableMap.of(
+                                 "namespace", "org.apache.druid.data.input",
                                  "type", "record",
                                  "name", "wikipedia",
                                  "fields", fieldList);
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllInputFormatTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllInputFormatTest.java
index 641d385865..7dd537e035 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllInputFormatTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITLocalInputSourceAllInputFormatTest.java
@@ -38,7 +38,7 @@ public class ITLocalInputSourceAllInputFormatTest extends AbstractLocalInputSour
   @Test
   public void testAvroInputFormatIndexDataIngestionSpecWithSchema() throws Exception
   {
-    List fieldList = ImmutableList.of(
+    List<Object> fieldList = ImmutableList.of(
         ImmutableMap.of("name", "timestamp", "type", "string"),
         ImmutableMap.of("name", "page", "type", "string"),
         ImmutableMap.of("name", "language", "type", "string"),
@@ -56,7 +56,8 @@ public class ITLocalInputSourceAllInputFormatTest extends AbstractLocalInputSour
         ImmutableMap.of("name", "deleted", "type", "int"),
         ImmutableMap.of("name", "delta", "type", "int")
     );
-    Map schema = ImmutableMap.of("namespace", "org.apache.druid.data.input",
+    Map<String, Object> schema = ImmutableMap.of(
+                                 "namespace", "org.apache.druid.data.input",
                                  "type", "record",
                                  "name", "wikipedia",
                                  "fields", fieldList);
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITS3ToS3ParallelIndexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITS3ToS3ParallelIndexTest.java
index 2116612ea5..ab1c33c459 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITS3ToS3ParallelIndexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITS3ToS3ParallelIndexTest.java
@@ -47,7 +47,7 @@ public class ITS3ToS3ParallelIndexTest extends AbstractS3InputSourceParallelInde
 {
   @Test
   @Parameters(method = "resources")
-  public void testS3IndexData(Pair<String, List> s3InputSource) throws Exception
+  public void testS3IndexData(Pair<String, List<?>> s3InputSource) throws Exception
   {
     doTest(s3InputSource, new Pair<>(false, false), "s3");
   }
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITAzureSQLBasedIngestionTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITAzureSQLBasedIngestionTest.java
index 4b2883b86c..c54e891696 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITAzureSQLBasedIngestionTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITAzureSQLBasedIngestionTest.java
@@ -50,7 +50,7 @@ public class ITAzureSQLBasedIngestionTest extends AbstractAzureInputSourceParall
   @Test
   @Parameters(method = "resources")
   @TestCaseName("Test_{index} ({0})")
-  public void testSQLBasedBatchIngestion(Pair<String, List> s3InputSource)
+  public void testSQLBasedBatchIngestion(Pair<String, List<?>> s3InputSource)
   {
     doMSQTest(s3InputSource, CLOUD_INGEST_SQL, INDEX_QUERIES_FILE, "azure");
   }
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITGcsSQLBasedIngestionTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITGcsSQLBasedIngestionTest.java
index 8ee235430b..a6031c6e1f 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITGcsSQLBasedIngestionTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITGcsSQLBasedIngestionTest.java
@@ -41,7 +41,7 @@ public class ITGcsSQLBasedIngestionTest extends AbstractGcsInputSourceParallelIn
   @Test
   @Parameters(method = "resources")
   @TestCaseName("Test_{index} ({0})")
-  public void testSQLBasedBatchIngestion(Pair<String, List> GcsInputSource)
+  public void testSQLBasedBatchIngestion(Pair<String, List<?>> GcsInputSource)
   {
     doMSQTest(GcsInputSource, CLOUD_INGEST_SQL, INDEX_QUERIES_FILE, "google");
   }
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITKeyStatisticsSketchMergeMode.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITKeyStatisticsSketchMergeMode.java
index cd71e2765e..bccde84099 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITKeyStatisticsSketchMergeMode.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITKeyStatisticsSketchMergeMode.java
@@ -19,7 +19,6 @@
 
 package org.apache.druid.testsEx.msq;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
 import org.apache.druid.java.util.common.StringUtils;
@@ -27,9 +26,7 @@ import org.apache.druid.msq.exec.ClusterStatisticsMergeMode;
 import org.apache.druid.msq.sql.SqlTaskStatus;
 import org.apache.druid.msq.util.MultiStageQueryContext;
 import org.apache.druid.sql.http.SqlQuery;
-import org.apache.druid.testing.IntegrationTestingConfig;
 import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
-import org.apache.druid.testing.clients.SqlResourceTestClient;
 import org.apache.druid.testing.utils.DataLoaderHelper;
 import org.apache.druid.testing.utils.MsqTestQueryHelper;
 import org.apache.druid.testsEx.categories.MultiStageQuery;
@@ -46,15 +43,6 @@ public class ITKeyStatisticsSketchMergeMode
   @Inject
   private MsqTestQueryHelper msqHelper;
 
-  @Inject
-  private SqlResourceTestClient msqClient;
-
-  @Inject
-  private IntegrationTestingConfig config;
-
-  @Inject
-  private ObjectMapper jsonMapper;
-
   @Inject
   private DataLoaderHelper dataLoaderHelper;
 
@@ -63,7 +51,6 @@ public class ITKeyStatisticsSketchMergeMode
 
   private static final String QUERY_FILE = "/multi-stage-query/wikipedia_msq_select_query1.json";
 
-
   @Test
   public void testMsqIngestionParallelMerging() throws Exception
   {
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMSQReindexTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMSQReindexTest.java
index 5f4cd310eb..7db3ea1017 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMSQReindexTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMSQReindexTest.java
@@ -19,13 +19,13 @@
 
 package org.apache.druid.testsEx.msq;
 
-import com.google.inject.Inject;
 import junitparams.Parameters;
 import junitparams.naming.TestCaseName;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.curator.shaded.com.google.common.collect.ImmutableMap;
 import org.apache.druid.java.util.common.logger.Logger;
-import org.apache.druid.testing.utils.MsqTestQueryHelper;
+import org.apache.druid.msq.util.MultiStageQueryContext;
+import org.apache.druid.query.groupby.GroupByQueryConfig;
 import org.apache.druid.testsEx.categories.MultiStageQuery;
 import org.apache.druid.testsEx.config.DruidTestRunner;
 import org.apache.druid.testsEx.indexer.AbstractITBatchIndexTest;
@@ -45,9 +45,6 @@ public class ITMSQReindexTest extends AbstractITBatchIndexTest
 
   private static final Logger LOG = new Logger(ITMSQReindexTest.class);
 
-  @Inject
-  private MsqTestQueryHelper msqHelper;
-
   public static List<List<String>> test_cases()
   {
     return Arrays.asList(
@@ -55,7 +52,6 @@ public class ITMSQReindexTest extends AbstractITBatchIndexTest
         Arrays.asList("wikipedia_merge_index_msq.sql", "wikipedia_merge_reindex_msq.sql", "wikipedia_merge_index_queries.json"),
         Arrays.asList("wikipedia_index_task_with_transform.sql", "wikipedia_reindex_with_transform_msq.sql", "wikipedia_reindex_queries_with_transforms.json")
     );
-
   }
 
   @Test
@@ -65,9 +61,9 @@ public class ITMSQReindexTest extends AbstractITBatchIndexTest
   {
     String indexDatasource = FilenameUtils.removeExtension(sqlFileName);
     String reindexDatasource = FilenameUtils.removeExtension(reIndexSqlFileName);
-    Map<String, Object> context = ImmutableMap.of("finalizeAggregations", false,
-                                                  "maxNumTasks", 5,
-                                                  "groupByEnableMultiValueUnnesting", false);
+    Map<String, Object> context = ImmutableMap.of(MultiStageQueryContext.CTX_FINALIZE_AGGREGATIONS, false,
+                                                  MultiStageQueryContext.CTX_MAX_NUM_TASKS, 5,
+                                                  GroupByQueryConfig.CTX_KEY_ENABLE_MULTI_VALUE_UNNESTING, false);
     try {
       submitMSQTaskFromFile(MSQ_TASKS_DIR + sqlFileName,
                             indexDatasource,
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMultiStageQueryWorkerFaultTolerance.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMultiStageQueryWorkerFaultTolerance.java
index c91dc7b3fa..f7e414d7af 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMultiStageQueryWorkerFaultTolerance.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITMultiStageQueryWorkerFaultTolerance.java
@@ -19,7 +19,6 @@
 
 package org.apache.druid.testsEx.msq;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
 import org.apache.druid.java.util.common.ISE;
@@ -28,9 +27,7 @@ import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.msq.sql.SqlTaskStatus;
 import org.apache.druid.msq.util.MultiStageQueryContext;
-import org.apache.druid.testing.IntegrationTestingConfig;
 import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
-import org.apache.druid.testing.clients.SqlResourceTestClient;
 import org.apache.druid.testing.utils.DataLoaderHelper;
 import org.apache.druid.testing.utils.ITRetryUtil;
 import org.apache.druid.testing.utils.MsqTestQueryHelper;
@@ -53,15 +50,6 @@ public class ITMultiStageQueryWorkerFaultTolerance
   @Inject
   private MsqTestQueryHelper msqHelper;
 
-  @Inject
-  private SqlResourceTestClient msqClient;
-
-  @Inject
-  private IntegrationTestingConfig config;
-
-  @Inject
-  private ObjectMapper jsonMapper;
-
   @Inject
   private DataLoaderHelper dataLoaderHelper;
 
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITS3SQLBasedIngestionTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITS3SQLBasedIngestionTest.java
index e0843a19af..a7219c6b2e 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITS3SQLBasedIngestionTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/msq/ITS3SQLBasedIngestionTest.java
@@ -54,7 +54,7 @@ public class ITS3SQLBasedIngestionTest extends AbstractS3InputSourceParallelInde
   @Test
   @Parameters(method = "resources")
   @TestCaseName("Test_{index} ({0})")
-  public void testSQLBasedBatchIngestion(Pair<String, List> s3InputSource)
+  public void testSQLBasedBatchIngestion(Pair<String, List<?>> s3InputSource)
   {
     doMSQTest(s3InputSource, CLOUD_INGEST_SQL, INDEX_QUERIES_FILE, "s3");
   }
diff --git a/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java b/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java
index 1b96f3fd04..7a42119e6c 100644
--- a/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java
+++ b/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java
@@ -77,27 +77,47 @@ public class CoordinatorResourceTestClient
 
   private String getSegmentsMetadataURL(String dataSource)
   {
-    return StringUtils.format("%smetadata/datasources/%s/segments", getCoordinatorURL(), StringUtils.urlEncode(dataSource));
+    return StringUtils.format(
+        "%smetadata/datasources/%s/segments",
+        getCoordinatorURL(),
+        StringUtils.urlEncode(dataSource)
+    );
   }
 
   private String getFullSegmentsMetadataURL(String dataSource)
   {
-    return StringUtils.format("%smetadata/datasources/%s/segments?full", getCoordinatorURL(), StringUtils.urlEncode(dataSource));
+    return StringUtils.format(
+        "%smetadata/datasources/%s/segments?full",
+        getCoordinatorURL(),
+        StringUtils.urlEncode(dataSource)
+    );
   }
 
   private String getIntervalsURL(String dataSource)
   {
-    return StringUtils.format("%sdatasources/%s/intervals", getCoordinatorURL(), StringUtils.urlEncode(dataSource));
+    return StringUtils.format(
+        "%sdatasources/%s/intervals",
+        getCoordinatorURL(),
+        StringUtils.urlEncode(dataSource)
+    );
   }
 
   private String getFullSegmentsURL(String dataSource)
   {
-    return StringUtils.format("%sdatasources/%s/segments?full", getCoordinatorURL(), StringUtils.urlEncode(dataSource));
+    return StringUtils.format(
+        "%sdatasources/%s/segments?full",
+        getCoordinatorURL(),
+        StringUtils.urlEncode(dataSource)
+    );
   }
 
   private String getLoadStatusURL(String dataSource)
   {
-    return StringUtils.format("%sdatasources/%s/loadstatus?forceMetadataRefresh=true&interval=1970-01-01/2999-01-01", getCoordinatorURL(), StringUtils.urlEncode(dataSource));
+    return StringUtils.format(
+        "%sdatasources/%s/loadstatus?forceMetadataRefresh=true&interval=1970-01-01/2999-01-01",
+        getCoordinatorURL(),
+        StringUtils.urlEncode(dataSource)
+    );
   }
 
   /** return a list of the segment dates for the specified data source */
@@ -105,7 +125,10 @@ public class CoordinatorResourceTestClient
   {
     List<String> segments;
     try {
-      StatusResponseHolder response = makeRequest(HttpMethod.GET, getSegmentsMetadataURL(dataSource));
+      StatusResponseHolder response = makeRequest(
+          HttpMethod.GET,
+          getSegmentsMetadataURL(dataSource)
+      );
 
       segments = jsonMapper.readValue(
           response.getContent(), new TypeReference<List<String>>()
@@ -123,7 +146,10 @@ public class CoordinatorResourceTestClient
   {
     List<DataSegment> segments;
     try {
-      StatusResponseHolder response = makeRequest(HttpMethod.GET, getFullSegmentsMetadataURL(dataSource));
+      StatusResponseHolder response = makeRequest(
+          HttpMethod.GET,
+          getFullSegmentsMetadataURL(dataSource)
+      );
 
       segments = jsonMapper.readValue(
           response.getContent(), new TypeReference<List<DataSegment>>()
@@ -173,9 +199,9 @@ public class CoordinatorResourceTestClient
     }
   }
 
-  private Map<String, Integer> getLoadStatus(String dataSorce)
+  private Map<String, Integer> getLoadStatus(String dataSource)
   {
-    String url = getLoadStatusURL(dataSorce);
+    String url = getLoadStatusURL(dataSource);
     Map<String, Integer> status;
     try {
       StatusResponseHolder response = httpClient.go(
@@ -188,7 +214,7 @@ public class CoordinatorResourceTestClient
       }
       if (response.getStatus().getCode() != HttpResponseStatus.OK.getCode()) {
         throw new ISE(
-            "Error while making request to url[%s] status[%s] content[%s]",
+            "Error while making request to url [%s] status [%s] content [%s]",
             url,
             response.getStatus(),
             response.getContent()
@@ -216,7 +242,14 @@ public class CoordinatorResourceTestClient
   public void unloadSegmentsForDataSource(String dataSource)
   {
     try {
-      makeRequest(HttpMethod.DELETE, StringUtils.format("%sdatasources/%s", getCoordinatorURL(), StringUtils.urlEncode(dataSource)));
+      makeRequest(
+          HttpMethod.DELETE,
+          StringUtils.format(
+              "%sdatasources/%s",
+              getCoordinatorURL(),
+              StringUtils.urlEncode(dataSource)
+          )
+      );
     }
     catch (Exception e) {
       throw new RuntimeException(e);
@@ -270,7 +303,7 @@ public class CoordinatorResourceTestClient
 
     if (!response.getStatus().equals(HttpResponseStatus.ACCEPTED)) {
       throw new ISE(
-          "Error while querying[%s] status[%s] content[%s]",
+          "Error while querying [%s] status [%s] content [%s]",
           url,
           response.getStatus(),
           response.getContent()
@@ -280,13 +313,18 @@ public class CoordinatorResourceTestClient
     StatusResponseHolder response2 = httpClient.go(
         new Request(HttpMethod.POST, new URL(url)).setContent(
             "application/json",
-            jsonMapper.writeValueAsBytes(jsonMapper.readValue(CoordinatorResourceTestClient.class.getResourceAsStream(filePath), new TypeReference<Map<Object, Object>>(){}))
+            jsonMapper.writeValueAsBytes(
+                jsonMapper.readValue(
+                    CoordinatorResourceTestClient.class.getResourceAsStream(filePath),
+                    new TypeReference<Map<Object, Object>>(){}
+                )
+            )
         ), responseHandler
     ).get();
 
     if (!response2.getStatus().equals(HttpResponseStatus.ACCEPTED)) {
       throw new ISE(
-          "Error while querying[%s] status[%s] content[%s]",
+          "Error while querying [%s] status [%s] content [%s]",
           url,
           response2.getStatus(),
           response2.getContent()
@@ -320,7 +358,7 @@ public class CoordinatorResourceTestClient
       }
       if (response.getStatus().getCode() != HttpResponseStatus.OK.getCode()) {
         throw new ISE(
-            "Error while making request to url[%s] status[%s] content[%s]",
+            "Error while making request to url [%s] status [%s] content [%s]",
             url,
             response.getStatus(),
             response.getContent()
@@ -328,7 +366,8 @@ public class CoordinatorResourceTestClient
       }
 
       status = jsonMapper.readValue(
-          response.getContent(), new TypeReference<Map<String, Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>>>>()
+          response.getContent(),
+          new TypeReference<Map<String, Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>>>>()
           {
           }
       );
@@ -369,7 +408,7 @@ public class CoordinatorResourceTestClient
 
     if (!response.getStatus().equals(HttpResponseStatus.OK)) {
       throw new ISE(
-          "Error while setting dynamic config[%s] status[%s] content[%s]",
+          "Error while setting dynamic config [%s] status [%s] content [%s]",
           url,
           response.getStatus(),
           response.getContent()
@@ -389,7 +428,7 @@ public class CoordinatorResourceTestClient
 
     if (!response.getStatus().equals(HttpResponseStatus.OK)) {
       throw new ISE(
-          "Error while setting dynamic config[%s] status[%s] content[%s]",
+          "Error while setting dynamic config [%s] status [%s] content [%s]",
           url,
           response.getStatus(),
           response.getContent()
@@ -421,7 +460,7 @@ public class CoordinatorResourceTestClient
       ).get();
       if (!response.getStatus().equals(HttpResponseStatus.OK)) {
         throw new ISE(
-            "Error while making request to url[%s] status[%s] content[%s]",
+            "Error while making request to url [%s] status [%s] content [%s]",
             url,
             response.getStatus(),
             response.getContent()
diff --git a/it.sh b/it.sh
index eee0b6672f..77da6f3680 100755
--- a/it.sh
+++ b/it.sh
@@ -185,7 +185,6 @@ function verify_env_vars {
 
 CMD=$1
 shift
-MAVEN_IGNORE="-P skip-static-checks,skip-tests -Dmaven.javadoc.skip=true"
 
 case $CMD in
   "help" )
@@ -195,7 +194,7 @@ case $CMD in
     mvn -q clean package dependency:go-offline -P dist $MAVEN_IGNORE
     ;;
   "build" )
-    mvn clean package -P dist $MAVEN_IGNORE -T1.0C
+    mvn clean package -P dist,skip-static-checks,skip-tests -Dmaven.javadoc.skip=true -T1.0C $*
     ;;
   "dist" )
     mvn package -P dist $MAVEN_IGNORE -pl :distribution
diff --git a/processing/src/main/java/org/apache/druid/guice/ExtensionsLoader.java b/processing/src/main/java/org/apache/druid/guice/ExtensionsLoader.java
index 0d3b035e7a..0bdbddfa5a 100644
--- a/processing/src/main/java/org/apache/druid/guice/ExtensionsLoader.java
+++ b/processing/src/main/java/org/apache/druid/guice/ExtensionsLoader.java
@@ -319,13 +319,13 @@ public class ExtensionsLoader
       final String serviceImplName = serviceImpl.getClass().getName();
       if (serviceImplName == null) {
         log.warn(
-            "Implementation [%s] was ignored because it doesn't have a canonical name, "
+            "Implementation %s was ignored because it doesn't have a canonical name, "
             + "is it a local or anonymous class?",
             serviceImpl.getClass().getName()
         );
       } else if (!implClassNamesToLoad.contains(serviceImplName)) {
         log.debug(
-            "Adding implementation [%s] for class [%s] from %s extension",
+            "Adding implementation %s for class %s from %s extension",
             serviceImplName,
             serviceClass,
             extensionType
diff --git a/processing/src/test/java/org/apache/druid/segment/TestHelper.java b/processing/src/test/java/org/apache/druid/segment/TestHelper.java
index 4bbf9454c2..6dbde8ab6e 100644
--- a/processing/src/test/java/org/apache/druid/segment/TestHelper.java
+++ b/processing/src/test/java/org/apache/druid/segment/TestHelper.java
@@ -99,7 +99,6 @@ public class TestHelper
     final AnnotationIntrospector introspector = makeAnnotationIntrospector();
     DruidSecondaryModule.setupAnnotationIntrospector(mapper, introspector);
 
-
     mapper.setInjectableValues(
         new InjectableValues.Std()
             .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
@@ -115,7 +114,6 @@ public class TestHelper
     AnnotationIntrospector introspector = makeAnnotationIntrospector();
     DruidSecondaryModule.setupAnnotationIntrospector(mapper, introspector);
 
-
     mapper.setInjectableValues(
         new InjectableValues.Std()
             .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
diff --git a/server/src/main/java/org/apache/druid/guice/DruidInjectorBuilder.java b/server/src/main/java/org/apache/druid/guice/DruidInjectorBuilder.java
index b650c10066..5ddce3729c 100644
--- a/server/src/main/java/org/apache/druid/guice/DruidInjectorBuilder.java
+++ b/server/src/main/java/org/apache/druid/guice/DruidInjectorBuilder.java
@@ -186,7 +186,7 @@ public class DruidInjectorBuilder
     // Modules config is optional: it won't be present in tests or clients.
     String moduleClassName = moduleClass.getName();
     if (moduleClassName != null && modulesConfig.getExcludeList().contains(moduleClassName)) {
-      log.info("Not loading module [%s] because it is present in excludeList", moduleClassName);
+      log.info("Not loading module %s because it is present in excludeList", moduleClassName);
       return false;
     }
 
diff --git a/server/src/main/java/org/apache/druid/segment/indexing/granularity/GranularitySpec.java b/server/src/main/java/org/apache/druid/segment/indexing/granularity/GranularitySpec.java
index 148f039b86..75e9ac12ab 100644
--- a/server/src/main/java/org/apache/druid/segment/indexing/granularity/GranularitySpec.java
+++ b/server/src/main/java/org/apache/druid/segment/indexing/granularity/GranularitySpec.java
@@ -49,8 +49,6 @@ public interface GranularitySpec
    */
   Iterable<Interval> sortedBucketIntervals();
 
-
-
   /**
    * Returns user provided intervals as-is state. used for configuring granular path spec
    *
diff --git a/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java b/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java
index cd7c7a4d0d..a68d51725f 100644
--- a/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java
+++ b/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java
@@ -105,7 +105,6 @@ public class UniformGranularitySpec extends BaseGranularitySpec
     }
 
     return true;
-
   }
 
   @Override
diff --git a/server/src/main/java/org/apache/druid/server/RequestLogLine.java b/server/src/main/java/org/apache/druid/server/RequestLogLine.java
index 1aba4d317f..4c6f31582c 100644
--- a/server/src/main/java/org/apache/druid/server/RequestLogLine.java
+++ b/server/src/main/java/org/apache/druid/server/RequestLogLine.java
@@ -38,7 +38,7 @@ public class RequestLogLine
 {
   private static final Joiner JOINER = Joiner.on("\t");
 
-  private final Query query;
+  private final Query<?> query;
   private final String sql;
   private final Map<String, Object> sqlQueryContext;
   private final DateTime timestamp;
@@ -46,7 +46,7 @@ public class RequestLogLine
   private final QueryStats queryStats;
 
   private RequestLogLine(
-      @Nullable Query query,
+      @Nullable Query<?> query,
       @Nullable String sql,
       @Nullable Map<String, Object> sqlQueryContext,
       DateTime timestamp,
@@ -62,7 +62,7 @@ public class RequestLogLine
     this.queryStats = Preconditions.checkNotNull(queryStats, "queryStats");
   }
 
-  public static RequestLogLine forNative(Query query, DateTime timestamp, String remoteAddr, QueryStats queryStats)
+  public static RequestLogLine forNative(Query<?> query, DateTime timestamp, String remoteAddr, QueryStats queryStats)
   {
     return new RequestLogLine(query, null, null, timestamp, remoteAddr, queryStats);
   }
@@ -98,14 +98,17 @@ public class RequestLogLine
             remoteAddr,
             "",
             objectMapper.writeValueAsString(queryStats),
-            objectMapper.writeValueAsString(ImmutableMap.of("query", sql, "context", sqlQueryContext))
+            objectMapper.writeValueAsString(ImmutableMap.of(
+                "query", sql == null ? "<unavailable>" : sql,
+                "context", sqlQueryContext
+            ))
         )
     );
   }
 
   @Nullable
   @JsonProperty("query")
-  public Query getQuery()
+  public Query<?> getQuery()
   {
     return query;
   }
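
The "<unavailable>" placeholder above is needed because Guava's ImmutableMap rejects null keys and values outright. A minimal sketch, assuming Guava on the classpath:

    import com.google.common.collect.ImmutableMap;

    public class NullValueDemo
    {
      public static void main(String[] args)
      {
        String sql = null;

        // ImmutableMap.of("query", sql) would throw NullPointerException,
        // aborting the request log line entirely:
        // ImmutableMap.of("query", sql);

        // Substituting a placeholder keeps the line serializable:
        System.out.println(ImmutableMap.of("query", sql == null ? "<unavailable>" : sql));
      }
    }
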
diff --git a/server/src/main/java/org/apache/druid/server/http/SelfDiscoveryResource.java b/server/src/main/java/org/apache/druid/server/http/SelfDiscoveryResource.java
index 48c8f7d64d..f82717408c 100644
--- a/server/src/main/java/org/apache/druid/server/http/SelfDiscoveryResource.java
+++ b/server/src/main/java/org/apache/druid/server/http/SelfDiscoveryResource.java
@@ -25,6 +25,7 @@ import com.google.inject.Singleton;
 import com.sun.jersey.spi.container.ResourceFilters;
 import org.apache.druid.discovery.DruidNodeDiscoveryProvider;
 import org.apache.druid.discovery.NodeRole;
+import org.apache.druid.guice.LazySingleton;
 import org.apache.druid.guice.annotations.Self;
 import org.apache.druid.java.util.common.lifecycle.Lifecycle;
 import org.apache.druid.server.DruidNode;
@@ -43,11 +44,11 @@ import java.util.Set;
 import java.util.function.BooleanSupplier;
 
 /**
- * This class is annotated {@link Singleton} rather than {@link org.apache.druid.guice.LazySingleton} because it adds
+ * This class is annotated {@link Singleton} rather than {@link LazySingleton} because it adds
  * a lifecycle handler in the constructor. That should happen before the lifecycle is started, i. e. eagerly during the
  * DI configuration phase.
  */
-@Singleton
+@LazySingleton // To catch implicit instantiations in unit tests
 @Path("/status/selfDiscovered")
 public class SelfDiscoveryResource
 {
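
For readers unfamiliar with the distinction the javadoc draws: an eager singleton is constructed while the injector is being built, a lazy one only on first injection. A minimal plain-Guice sketch (hypothetical Resource class; Druid's LazySingleton scope is analogous but not shown here):

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;

    class EagerVsLazyDemo
    {
      static class Resource
      {
        Resource()
        {
          System.out.println("Resource constructed");
        }
      }

      public static void main(String[] args)
      {
        // asEagerSingleton() constructs Resource during injector creation,
        // so "Resource constructed" prints immediately; a lazily scoped
        // binding would defer construction until the first injection.
        Guice.createInjector(new AbstractModule()
        {
          @Override
          protected void configure()
          {
            bind(Resource.class).asEagerSingleton();
          }
        });
      }
    }
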
diff --git a/server/src/main/java/org/apache/druid/server/log/FilteredRequestLoggerProvider.java b/server/src/main/java/org/apache/druid/server/log/FilteredRequestLoggerProvider.java
index beee88e94b..e48bd0062f 100644
--- a/server/src/main/java/org/apache/druid/server/log/FilteredRequestLoggerProvider.java
+++ b/server/src/main/java/org/apache/druid/server/log/FilteredRequestLoggerProvider.java
@@ -114,7 +114,7 @@ public class FilteredRequestLoggerProvider implements RequestLoggerProvider
     {
       Object queryTime = requestLogLine.getQueryStats().getStats().get("query/time");
       if (queryTime != null && ((Number) queryTime).longValue() >= queryTimeThresholdMs) {
-        Query query = requestLogLine.getQuery();
+        Query<?> query = requestLogLine.getQuery();
         if (query != null && mutedQueryTypes.contains(query.getType())) {
           return;
         }
@@ -127,7 +127,7 @@ public class FilteredRequestLoggerProvider implements RequestLoggerProvider
     {
       Object sqlQueryTime = requestLogLine.getQueryStats().getStats().get("sqlQuery/time");
       if (sqlQueryTime != null && ((Number) sqlQueryTime).longValue() >= sqlQueryTimeThresholdMs) {
-        Query query = requestLogLine.getQuery();
+        Query<?> query = requestLogLine.getQuery();
         if (query != null && mutedQueryTypes.contains(query.getType())) {
           return;
         }
diff --git a/server/src/main/java/org/apache/druid/server/security/Authenticator.java b/server/src/main/java/org/apache/druid/server/security/Authenticator.java
index ab81573db3..0afb9109f0 100644
--- a/server/src/main/java/org/apache/druid/server/security/Authenticator.java
+++ b/server/src/main/java/org/apache/druid/server/security/Authenticator.java
@@ -99,10 +99,9 @@ public interface Authenticator extends ServletFilterHolder
   @Nullable
   AuthenticationResult authenticateJDBCContext(Map<String, Object> context);
 
-
   /**
    * This is used to add some Headers or Authentication token/results that can be used by down stream target host.
-   * Such token can be used to authenticate the user down stream, in cases where to original credenitals
+   * Such a token can be used to authenticate the user down stream, in cases where the original credentials
    * are not forwardable as is and therefore the need to attach some authentication tokens by the proxy.
    *
    * @param clientRequest original client request processed by the upstream chain of authenticator
diff --git a/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java b/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java
index a87f21c1a2..d0f543a06d 100644
--- a/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java
+++ b/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java
@@ -22,6 +22,7 @@ package org.apache.druid.server.security;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.emitter.EmittingLogger;
+import org.apache.druid.java.util.emitter.service.AlertBuilder;
 import org.apache.druid.query.QueryException;
 import org.apache.druid.query.QueryInterruptedException;
 import org.apache.druid.server.DruidNode;
@@ -66,7 +67,6 @@ public class PreResponseAuthorizationCheckFilter implements Filter
   @Override
   public void init(FilterConfig filterConfig)
   {
-
   }
 
   @Override
@@ -90,7 +90,7 @@ public class PreResponseAuthorizationCheckFilter implements Filter
       // (e.g. OverlordServletProxy), so this is not implemented for now.
       handleAuthorizationCheckError(
           StringUtils.format(
-              "Request did not have an authorization check performed, original response status[%s].",
+              "Request did not have an authorization check performed, original response status [%s].",
               response.getStatus()
           ),
           request,
@@ -110,7 +110,6 @@ public class PreResponseAuthorizationCheckFilter implements Filter
   @Override
   public void destroy()
   {
-
   }
 
   private void handleUnauthenticatedRequest(
@@ -148,16 +147,24 @@ public class PreResponseAuthorizationCheckFilter implements Filter
       HttpServletResponse servletResponse
   )
   {
-    final String queryId = servletResponse.getHeader(QueryResource.QUERY_ID_RESPONSE_HEADER);
-
     // Send out an alert so there's a centralized collection point for seeing errors of this nature
-    log.makeAlert(errorMsg)
-       .addData("uri", servletRequest.getRequestURI())
-       .addData("method", servletRequest.getMethod())
-       .addData("remoteAddr", servletRequest.getRemoteAddr())
-       .addData("remoteHost", servletRequest.getRemoteHost())
-       .addData("queryId", queryId)
-       .emit();
+    AlertBuilder builder = log.makeAlert(errorMsg)
+        .addData("uri", servletRequest.getRequestURI())
+        .addData("method", servletRequest.getMethod())
+        .addData("remoteAddr", servletRequest.getRemoteAddr());
+
+    // Omit the host name if it just repeats the IP address.
+    String remoteHost = servletRequest.getRemoteHost();
+    if (remoteHost != null && !remoteHost.equals(servletRequest.getRemoteAddr())) {
+      builder.addData("remoteHost", remoteHost);
+    }
+
+    // Omit the query ID if there is no ID.
+    final String queryId = servletResponse.getHeader(QueryResource.QUERY_ID_RESPONSE_HEADER);
+    if (queryId != null) {
+      builder.addData("queryId", queryId);
+    }
+    builder.emit();
 
     if (!servletResponse.isCommitted()) {
       try {
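
Distilled, the rewritten alert construction above is a build, conditionally-add, emit pattern: start the AlertBuilder with the fields that are always meaningful, add optional ones only when they carry information, and emit exactly once. A sketch under those assumptions (`log` is the class's EmittingLogger; `uri`, `remoteAddr`, and `queryId` are placeholders):

    AlertBuilder builder = log.makeAlert("authorization check failed")
        .addData("uri", uri)
        .addData("remoteAddr", remoteAddr);
    // Optional field: only attach it when it adds information.
    if (queryId != null) {
      builder.addData("queryId", queryId);
    }
    // Emit once, after all fields are decided.
    builder.emit();
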
diff --git a/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java b/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java
index ebeffcf8c9..0f7e00b484 100644
--- a/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java
+++ b/server/src/test/java/org/apache/druid/server/log/DefaultRequestLogEventTest.java
@@ -90,7 +90,7 @@ public class DefaultRequestLogEventTest
     final DateTime timestamp = DateTimes.of(2019, 12, 12, 3, 1);
     final String service = "druid-service";
     final String host = "127.0.0.1";
-    final Query query = new TimeseriesQuery(
+    final Query<?> query = new TimeseriesQuery(
         new TableDataSource("dummy"),
         new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
         true,
diff --git a/server/src/test/java/org/apache/druid/server/log/FilteredRequestLoggerTest.java b/server/src/test/java/org/apache/druid/server/log/FilteredRequestLoggerTest.java
index 54ee6e2a8f..b788589e22 100644
--- a/server/src/test/java/org/apache/druid/server/log/FilteredRequestLoggerTest.java
+++ b/server/src/test/java/org/apache/druid/server/log/FilteredRequestLoggerTest.java
@@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.inject.ProvisionException;
 import org.apache.druid.guice.JsonConfigurator;
 import org.apache.druid.jackson.DefaultObjectMapper;
+import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.query.Query;
 import org.apache.druid.query.TableDataSource;
 import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery;
@@ -38,6 +39,7 @@ import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
 import javax.validation.Validation;
+
 import java.io.IOException;
 import java.util.Properties;
 
@@ -100,14 +102,28 @@ public class FilteredRequestLoggerTest
     logger.logSqlQuery(sqlRequestLogLine);
   }
 
+  private static class MockLogger implements RequestLogger
+  {
+    private int nativeCount;
+    private int sqlCount;
+
+    @Override
+    public void logNativeQuery(RequestLogLine requestLogLine)
+    {
+      nativeCount++;
+    }
+
+    @Override
+    public void logSqlQuery(RequestLogLine requestLogLine)
+    {
+      sqlCount++;
+    }
+  }
+
   @Test
   public void testNotFilterAboveThreshold() throws IOException
   {
-    RequestLogger delegate = EasyMock.createStrictMock(RequestLogger.class);
-    delegate.logNativeQuery(EasyMock.anyObject());
-    EasyMock.expectLastCall().times(2);
-    delegate.logSqlQuery(EasyMock.anyObject());
-    EasyMock.expectLastCall().times(2);
+    MockLogger delegate = new MockLogger();
 
     FilteredRequestLoggerProvider.FilteredRequestLogger logger = new FilteredRequestLoggerProvider.FilteredRequestLogger(
         delegate,
@@ -116,46 +132,34 @@ public class FilteredRequestLoggerTest
         ImmutableList.of()
     );
 
-    RequestLogLine nativeRequestLogLine = EasyMock.createMock(RequestLogLine.class);
-    EasyMock.expect(nativeRequestLogLine.getQueryStats())
-            .andReturn(new QueryStats(ImmutableMap.of("query/time", 10000)))
-            .once();
-    EasyMock.expect(nativeRequestLogLine.getQueryStats())
-            .andReturn(new QueryStats(ImmutableMap.of("query/time", 1000)))
-            .once();
-    EasyMock.expect(nativeRequestLogLine.getQuery())
-            .andReturn(testSegmentMetadataQuery)
-            .times(2);
-
-    RequestLogLine sqlRequestLogLine = EasyMock.createMock(RequestLogLine.class);
-    EasyMock.expect(sqlRequestLogLine.getQueryStats())
-            .andReturn(new QueryStats(ImmutableMap.of("sqlQuery/time", 10000)))
-            .once();
-    EasyMock.expect(sqlRequestLogLine.getQueryStats())
-            .andReturn(new QueryStats(ImmutableMap.of("sqlQuery/time", 2000)))
-            .once();
-    EasyMock.expect(sqlRequestLogLine.getQuery())
-            .andReturn(testSegmentMetadataQuery)
-            .times(2);
+    RequestLogLine nativeRequestLogLine = RequestLogLine.forNative(
+        testSegmentMetadataQuery,
+        DateTimes.nowUtc(), // Not used
+        null, // Not used
+        new QueryStats(ImmutableMap.of("query/time", 1000))
+    );
 
-    EasyMock.replay(nativeRequestLogLine, sqlRequestLogLine, delegate);
+    RequestLogLine sqlRequestLogLine = RequestLogLine.forSql(
+        "SELECT * FROM foo",
+        null,
+        DateTimes.nowUtc(), // Not used
+        null,  // Not used
+        new QueryStats(ImmutableMap.of("sqlQuery/time", 2000))
+    );
 
     logger.logNativeQuery(nativeRequestLogLine);
     logger.logNativeQuery(nativeRequestLogLine);
     logger.logSqlQuery(sqlRequestLogLine);
     logger.logSqlQuery(sqlRequestLogLine);
 
-    EasyMock.verify(nativeRequestLogLine, sqlRequestLogLine, delegate);
+    Assert.assertEquals(2, delegate.nativeCount);
+    Assert.assertEquals(2, delegate.sqlCount);
   }
 
   @Test
   public void testNotFilterAboveThresholdSkipSegmentMetadata() throws IOException
   {
-    RequestLogger delegate = EasyMock.createStrictMock(RequestLogger.class);
-    delegate.logNativeQuery(EasyMock.anyObject());
-    EasyMock.expectLastCall().andThrow(new IOException());
-    delegate.logSqlQuery(EasyMock.anyObject());
-    EasyMock.expectLastCall().andThrow(new IOException());
+    MockLogger delegate = new MockLogger();
 
     FilteredRequestLoggerProvider.FilteredRequestLogger logger = new FilteredRequestLoggerProvider.FilteredRequestLogger(
         delegate,
@@ -164,26 +168,26 @@ public class FilteredRequestLoggerTest
         ImmutableList.of(Query.SEGMENT_METADATA)
     );
 
-    RequestLogLine nativeRequestLogLine = EasyMock.createMock(RequestLogLine.class);
-    EasyMock.expect(nativeRequestLogLine.getQueryStats())
-            .andReturn(new QueryStats(ImmutableMap.of("query/time", 10000)))
-            .once();
-    EasyMock.expect(nativeRequestLogLine.getQuery())
-            .andReturn(testSegmentMetadataQuery)
-            .once();
-
-    RequestLogLine sqlRequestLogLine = EasyMock.createMock(RequestLogLine.class);
-    EasyMock.expect(sqlRequestLogLine.getQueryStats())
-            .andReturn(new QueryStats(ImmutableMap.of("sqlQuery/time", 10000)))
-            .once();
-    EasyMock.expect(sqlRequestLogLine.getQuery())
-            .andReturn(testSegmentMetadataQuery)
-            .once();
+    RequestLogLine nativeRequestLogLine = RequestLogLine.forNative(
+        testSegmentMetadataQuery,
+        DateTimes.nowUtc(), // Not used
+        null, // Not used
+        new QueryStats(ImmutableMap.of("query/time", 10000))
+    );
 
-    EasyMock.replay(nativeRequestLogLine, sqlRequestLogLine, delegate);
+    RequestLogLine sqlRequestLogLine = RequestLogLine.forSql(
+        "SELECT * FROM foo",
+        null,
+        DateTimes.nowUtc(), // Not used
+        null,  // Not used
+        new QueryStats(ImmutableMap.of("sqlQuery/time", 10000))
+    );
 
     logger.logNativeQuery(nativeRequestLogLine);
     logger.logSqlQuery(sqlRequestLogLine);
+
+    Assert.assertEquals(0, delegate.nativeCount);
+    Assert.assertEquals(1, delegate.sqlCount);
   }
 
   @Test
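
The MockLogger above swaps strict EasyMock mocks for a hand-rolled counting fake: the test simply invokes the subject and asserts on call counts afterwards, with no expectation ordering to maintain. The same pattern in isolation (interface and names hypothetical):

    interface EventSink
    {
      void onEvent(String event);
    }

    class CountingSink implements EventSink
    {
      int eventCount;

      @Override
      public void onEvent(String event)
      {
        eventCount++;
      }
    }

    // In a test: exercise the subject, then assert on the counts.
    CountingSink sink = new CountingSink();
    sink.onEvent("a");
    Assert.assertEquals(1, sink.eventCount);
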
diff --git a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java
index 87b02327dc..f3dd1eb680 100644
--- a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java
+++ b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java
@@ -72,7 +72,7 @@ public class LoggingRequestLoggerTest
   final DateTime timestamp = DateTimes.of("2016-01-01T00:00:00Z");
   final String remoteAddr = "some.host.tld";
   final Map<String, Object> queryContext = ImmutableMap.of("foo", "bar");
-  final Query query = new FakeQuery(
+  final Query<?> query = new FakeQuery(
       new TableDataSource("datasource"),
       new QuerySegmentSpec()
       {
@@ -90,7 +90,7 @@ public class LoggingRequestLoggerTest
       }, false, queryContext
   );
 
-  final Query nestedQuery = new FakeQuery(
+  final Query<?> nestedQuery = new FakeQuery(
       new QueryDataSource(query),
       new QuerySegmentSpec()
       {
@@ -320,9 +320,14 @@ public class LoggingRequestLoggerTest
 }
 
 @JsonTypeName("fake")
-class FakeQuery extends BaseQuery
+class FakeQuery extends BaseQuery<Object>
 {
-  public FakeQuery(DataSource dataSource, QuerySegmentSpec querySegmentSpec, boolean descending, Map context)
+  public FakeQuery(
+      DataSource dataSource,
+      QuerySegmentSpec querySegmentSpec,
+      boolean descending,
+      Map<String, Object> context
+  )
   {
     super(dataSource, querySegmentSpec, descending, context);
   }
diff --git a/services/src/main/java/org/apache/druid/server/router/QueryHostFinder.java b/services/src/main/java/org/apache/druid/server/router/QueryHostFinder.java
index 6a7a0785fc..f24b41bc6b 100644
--- a/services/src/main/java/org/apache/druid/server/router/QueryHostFinder.java
+++ b/services/src/main/java/org/apache/druid/server/router/QueryHostFinder.java
@@ -76,12 +76,12 @@ public class QueryHostFinder
     Server chosenServer = avaticaConnectionBalancer.pickServer(getAllServers(), connectionId);
     assertServerFound(
         chosenServer,
-        "No server found for Avatica request with connectionId[%s]",
+        "No server found for Avatica request with connectionId [%s]",
         connectionId
     );
 
     log.debug(
-        "Balancer class [%s] sending request with connectionId[%s] to server: %s",
+        "Balancer class [%s] sending request with connectionId [%s] to server: %s",
         avaticaConnectionBalancer.getClass(),
         connectionId,
         chosenServer.getHost()
@@ -121,7 +121,7 @@ public class QueryHostFinder
 
     if (server == null) {
       log.error(
-          "No server found for serviceName[%s]. Using backup",
+          "No server found for serviceName [%s]. Using backup",
           serviceName
       );
 
@@ -129,7 +129,7 @@ public class QueryHostFinder
 
       if (server == null) {
         log.error(
-            "No backup found for serviceName[%s]. Using default[%s]",
+            "No backup found for serviceName [%s]. Using default [%s]",
             serviceName,
             hostSelector.getDefaultServiceName()
         );
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
index e2b91f7d79..a50b63596b 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java
@@ -98,7 +98,7 @@ public class CalciteRulesManager
  // Calcite 1.23.0 fixes this issue by not considering the expression as reduced in this case. However, while
  // we are still using Calcite 1.21.0, a workaround is to limit the number of pattern matches to avoid an infinite loop.
   private static final String HEP_DEFAULT_MATCH_LIMIT_CONFIG_STRING = "druid.sql.planner.hepMatchLimit";
-  private final int HEP_DEFAULT_MATCH_LIMIT = Integer.valueOf(
+  private static final int HEP_DEFAULT_MATCH_LIMIT = Integer.valueOf(
       System.getProperty(HEP_DEFAULT_MATCH_LIMIT_CONFIG_STRING, "1200")
   );
 
@@ -110,7 +110,7 @@ public class CalciteRulesManager
   //    functions).
   // 3) JoinCommuteRule (we don't support reordering joins yet).
   // 4) JoinPushThroughJoinRule (we don't support reordering joins yet).
-  private final List<RelOptRule> BASE_RULES =
+  private static final List<RelOptRule> BASE_RULES =
       ImmutableList.of(
           AggregateStarTableRule.INSTANCE,
           AggregateStarTableRule.INSTANCE2,
@@ -133,7 +133,7 @@ public class CalciteRulesManager
       );
 
   // Rules for scanning via Bindable, embedded directly in RelOptUtil's registerDefaultRules.
-  private final List<RelOptRule> DEFAULT_BINDABLE_RULES =
+  private static final List<RelOptRule> DEFAULT_BINDABLE_RULES =
       ImmutableList.of(
           Bindables.BINDABLE_TABLE_SCAN_RULE,
           ProjectTableScanRule.INSTANCE,
@@ -145,7 +145,7 @@ public class CalciteRulesManager
   // 1) ReduceExpressionsRule.JOIN_INSTANCE
   //    Removed by https://github.com/apache/druid/pull/9941 due to issue in https://github.com/apache/druid/issues/9942
   //    TODO: Re-enable when https://github.com/apache/druid/issues/9942 is fixed
-  private final List<RelOptRule> REDUCTION_RULES =
+  private static final List<RelOptRule> REDUCTION_RULES =
       ImmutableList.of(
           ReduceExpressionsRule.PROJECT_INSTANCE,
           ReduceExpressionsRule.FILTER_INSTANCE,
@@ -161,7 +161,7 @@ public class CalciteRulesManager
   // Omit DateRangeRules due to https://issues.apache.org/jira/browse/CALCITE-1601
   // Omit UnionMergeRule since it isn't very effective given how Druid unions currently operate and is potentially
   // expensive in terms of planning time.
-  private final List<RelOptRule> ABSTRACT_RULES =
+  private static final List<RelOptRule> ABSTRACT_RULES =
       ImmutableList.of(
           AggregateProjectPullUpConstantsRule.INSTANCE2,
           UnionPullUpConstantsRule.INSTANCE,
@@ -189,7 +189,7 @@ public class CalciteRulesManager
   // 4) FilterJoinRule.FILTER_ON_JOIN and FilterJoinRule.JOIN
   //    Removed by https://github.com/apache/druid/pull/9773 due to issue in https://github.com/apache/druid/issues/9843
   //    TODO: Re-enable when https://github.com/apache/druid/issues/9843 is fixed
-  private final List<RelOptRule> ABSTRACT_RELATIONAL_RULES =
+  private static final List<RelOptRule> ABSTRACT_RELATIONAL_RULES =
       ImmutableList.of(
           AbstractConverter.ExpandConversionRule.INSTANCE,
           AggregateRemoveRule.INSTANCE,
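
The `private final` to `private static final` changes above mean each immutable rule list is built once and shared across all CalciteRulesManager instances, rather than rebuilt and stored per instance. A hypothetical before/after sketch:

    import com.google.common.collect.ImmutableList;
    import java.util.List;

    class Before
    {
      // One copy held by every instance.
      private final List<String> rules = ImmutableList.of("a", "b");
    }

    class After
    {
      // One copy shared by the whole class.
      private static final List<String> RULES = ImmutableList.of("a", "b");
    }
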
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
index a5c5fd1361..45b808d231 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
@@ -497,11 +497,7 @@ public class DruidOperatorTable implements SqlOperatorTable
       final SqlNameMatcher nameMatcher
   )
   {
-    if (opName == null) {
-      return;
-    }
-
-    if (opName.names.size() != 1) {
+    if (opName == null || opName.names.size() != 1) {
       return;
     }
 
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchemaCatalog.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchemaCatalog.java
index 34f15af4ba..d81426af08 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchemaCatalog.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchemaCatalog.java
@@ -47,8 +47,8 @@ public class DruidSchemaCatalog
   private final Map<String, NamedSchema> namedSchemas;
 
   public DruidSchemaCatalog(
-      SchemaPlus rootSchema,
-      Map<String, NamedSchema> schemas
+      final SchemaPlus rootSchema,
+      final Map<String, NamedSchema> schemas
   )
   {
     this.rootSchema = rootSchema;
@@ -102,13 +102,10 @@ public class DruidSchemaCatalog
   @Nullable
   public String getResourceType(String schema, String resourceName)
   {
-    if (namedSchemas.containsKey(schema)) {
-      return namedSchemas.get(schema).getSchemaResourceType(resourceName);
-    }
-    return null;
+    final NamedSchema namedSchema = namedSchemas.get(schema);
+    return namedSchema == null ? null : namedSchema.getSchemaResourceType(resourceName);
   }
 
-
   @Override
   public boolean equals(Object o)
   {
@@ -118,7 +115,7 @@ public class DruidSchemaCatalog
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
-    DruidSchemaCatalog that = (DruidSchemaCatalog) o;
+    final DruidSchemaCatalog that = (DruidSchemaCatalog) o;
     return rootSchema.equals(that.rootSchema) && namedSchemas.equals(that.namedSchemas);
   }
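
The getResourceType() rewrite above also replaces a containsKey/get pair with a single get(), halving the hash lookups; this is safe whenever the map never stores null values. The general shape (hypothetical map and key):

    // Two lookups:
    if (map.containsKey(key)) {
      return map.get(key).getSchemaResourceType(name);
    }
    return null;

    // One lookup, equivalent when null values are never stored:
    NamedSchema schema = map.get(key);
    return schema == null ? null : schema.getSchemaResourceType(name);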
 
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/RootSchemaProvider.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/RootSchemaProvider.java
index 0c61763358..f7f4f660e2 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/RootSchemaProvider.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/RootSchemaProvider.java
@@ -32,27 +32,22 @@ import java.util.Set;
 import java.util.stream.Collectors;
 
 /**
- * Provides the RootSchema for calcite with
+ * Provides the RootSchema for Calcite with
  * - metadata schema disabled because it's not needed
- * - caching disabled because druid's caching is better.
+ * - caching disabled because Druid's caching is better.
  *
  * All the provided schemas are added to the rootSchema.
  */
 public class RootSchemaProvider implements Provider<DruidSchemaCatalog>
 {
   private final Set<NamedSchema> namedSchemas;
+  private final Map<String, NamedSchema> schemasByName;
 
   @Inject
   RootSchemaProvider(Set<NamedSchema> namedSchemas)
   {
     this.namedSchemas = namedSchemas;
-  }
-
-  @Override
-  public DruidSchemaCatalog get()
-  {
-    final SchemaPlus rootSchema = CalciteSchema.createRootSchema(false, false).plus();
-    final Map<String, NamedSchema> schemasByName = Maps.newHashMapWithExpectedSize(namedSchemas.size());
+    schemasByName = Maps.newHashMapWithExpectedSize(namedSchemas.size());
     for (NamedSchema schema : namedSchemas) {
       if (schemasByName.containsKey(schema.getSchemaName())) {
         throw new ISE(
@@ -61,6 +56,14 @@ public class RootSchemaProvider implements Provider<DruidSchemaCatalog>
         );
       }
       schemasByName.put(schema.getSchemaName(), schema);
+    }
+  }
+
+  @Override
+  public DruidSchemaCatalog get()
+  {
+    final SchemaPlus rootSchema = CalciteSchema.createRootSchema(false, false).plus();
+    for (NamedSchema schema : namedSchemas) {
       rootSchema.add(schema.getSchemaName(), schema.getSchema());
     }
     return new DruidSchemaCatalog(rootSchema, ImmutableMap.copyOf(schemasByName));
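
Moving the duplicate-name check from get() into the constructor above makes RootSchemaProvider fail fast: a conflicting schema name now aborts at injection time, once, instead of being re-detected on every get() call. The fail-fast shape in general (names hypothetical):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    class Registry
    {
      private final Set<String> names = new HashSet<>();

      Registry(List<String> input)
      {
        for (String name : input) {
          // Validate at construction time, not on first use.
          if (!names.add(name)) {
            throw new IllegalStateException("Duplicate name: " + name);
          }
        }
      }
    }
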
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
index ddbe3e533b..4d511dcc9f 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
@@ -116,6 +116,7 @@ import org.junit.rules.TemporaryFolder;
 
 import javax.annotation.Nullable;
 import java.io.IOException;
+import java.io.PrintStream;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -843,7 +844,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
 
     public CalciteTestConfig()
     {
-
     }
 
     public CalciteTestConfig(boolean isRunningMSQ)
@@ -851,7 +851,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
       this.isRunningMSQ = isRunningMSQ;
     }
 
-
     @Override
     public QueryLogHook queryLogHook()
     {
@@ -1034,12 +1033,11 @@ public class BaseCalciteQueryTest extends CalciteTestBase
     );
   }
 
-
   /**
    * Override not just the outer query context, but also the contexts of all subqueries.
    * @return the query with its own context, and the contexts of all subqueries, cleared
    */
-  public static <T> Query recursivelyClearContext(final Query<T> query, ObjectMapper queryJsonMapper)
+  public static <T> Query<?> recursivelyClearContext(final Query<T> query, ObjectMapper queryJsonMapper)
   {
     try {
       Query<T> newQuery = query.withDataSource(recursivelyClearContext(query.getDataSource(), queryJsonMapper));
@@ -1211,8 +1209,57 @@ public class BaseCalciteQueryTest extends CalciteTestBase
     @Override
     public void verify(String sql, List<Object[]> results)
     {
-      Assert.assertEquals(StringUtils.format("result count: %s", sql), expectedResults.size(), results.size());
-      assertResultsEquals(sql, expectedResults, results);
+      try {
+        Assert.assertEquals(StringUtils.format("result count: %s", sql), expectedResults.size(), results.size());
+        assertResultsEquals(sql, expectedResults, results);
+      }
+      catch (AssertionError e) {
+        displayResults(results);
+        throw e;
+      }
+    }
+  }
+
+  /**
+   * Dump the actual results as the elements of a Java array that can be pasted
+   * into a test as the expected results. This is a convenient way to create the
+   * expected results: let the test fail with empty expected results, then copy
+   * the actual results printed to the console into the test.
+   */
+  public static void displayResults(List<Object[]> results)
+  {
+    PrintStream out = System.out;
+    out.println("-- Actual results --");
+    for (int rowIndex = 0; rowIndex < results.size(); rowIndex++) {
+      Object[] row = results.get(rowIndex);
+      out.print("new Object[] {");
+      for (int colIndex = 0; colIndex < row.length; colIndex++) {
+        Object col = row[colIndex];
+        if (colIndex > 0) {
+          out.print(", ");
+        }
+        if (col == null) {
+          out.print("null");
+        } else if (col instanceof String) {
+          out.print("\"");
+          out.print(col);
+          out.print("\"");
+        } else if (col instanceof Long) {
+          out.print(col);
+          out.print("L");
+        } else if (col instanceof Double) {
+          out.print(col);
+          out.print("D");
+        } else {
+          out.print(col);
+        }
+      }
+      out.print("}");
+      if (rowIndex < results.size() - 1) {
+        out.print(",");
+      }
+      out.println();
     }
+    out.println("----");
   }
 }
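
For reference, given two failing rows such as {"abc", 10L, 1.5D} and {null, 20L, 2.5D}, displayResults() prints output of this shape, ready to paste into the test as the expected results (values illustrative):

    -- Actual results --
    new Object[] {"abc", 10L, 1.5D},
    new Object[] {null, 20L, 2.5D}
    ----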


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org
For additional commands, e-mail: commits-help@druid.apache.org