Posted to commits@druid.apache.org by gi...@apache.org on 2019/08/07 16:52:55 UTC

[incubator-druid] branch master updated: Speedup Travis CI jobs (#8240)

This is an automated email from the ASF dual-hosted git repository.

gian pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-druid.git


The following commit(s) were added to refs/heads/master by this push:
     new 05b44e3  Speedup Travis CI jobs (#8240)
05b44e3 is described below

commit 05b44e3467b423be0cb2fd449d799241898fa5b0
Author: Chi Cao Minh <ch...@imply.io>
AuthorDate: Wed Aug 7 09:52:42 2019 -0700

    Speedup Travis CI jobs (#8240)
    
    Reorganize Travis CI jobs into smaller, faster (and more numerous) jobs.
    Add various Maven options to skip unnecessary work, and refactor the
    Travis CI job definitions to follow DRY.
    
    Detailed changes:
    
    .travis.yml
    - Refactor build logic with YAML anchors/aliases to eliminate
      copy-and-paste duplication (see the sketch after this list)
    - Skip static checks and enable parallelism for 'mvn install'
    - Split static analysis into different jobs to ease triage
    - Use "name" attribute instead of NAME environment variable
    - Split "indexing" and "web console" out of "other modules test"
    - Split 2 integration test jobs into multiple smaller jobs
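
      For illustration, the DRY refactoring leans on YAML anchors and
      aliases so a job definition is written once and reused (a condensed
      sketch of the pattern; the full file is in the diff below):

          - name: "server module test"
            env: &server_env
              - MAVEN_PROJECTS='server'
            before_script: &setup_java_test
              - unset _JAVA_OPTIONS

          - name: "server module test (SQL Compatibility)"
            env: *server_env                 # reuse the anchored env
            before_script: *setup_java_test  # reuse the anchored steps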
    
    build.sh
    - Enable parallelism
    - Disable more static checks
    
    travis_script_integration.sh
    travis_script_integration_part2.sh
    integration-tests/README.md
    - Use TestNG groups instead of shell scripts, and move the job
      definitions into the Travis CI YAML (example invocation below)
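
      Each Travis CI job now selects its tests by group directly; a
      representative invocation, matching the new .travis.yml:

          mvn verify -pl integration-tests -P integration-tests -Dgroups=batch-index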
    
    integration-tests/pom.xml
    - Show the elapsed time of individual tests to aid future rebalancing
      of Travis CI integration test job run times (see the snippet below)
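
      The change is a single Surefire setting (shown here in context;
      surrounding elements abridged):

          <configuration>
              ...
              <reportFormat>plain</reportFormat>  <!-- show elapsed time for each test -->
          </configuration>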
    
    TestNGGroup.java
    - Use TestNG groups to make it easy to have multiple Travis CI
      integration test jobs. TestNG groups also make it easier to define an
      "other" integration test job and make it less likely that a test is
      accidentally excluded from CI.
    
    IT*Test.java
    AbstractITBatchIndexTest.java
    AbstractKafkaIndexerTest.java
    - Add TestNG group
    - Fix various IntelliJ inspection warnings
    - Reduce the scope of helper methods, since the TestNG group annotation
      on the class makes TestNG treat all public methods as test methods
      (see the sketch below)
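
      A condensed sketch of the pattern (class and method names as in the
      diff below; bodies elided):

          @Test(groups = TestNGGroup.BATCH_INDEX)
          @Guice(moduleFactory = DruidTestModuleFactory.class)
          public class ITIndexerTest extends AbstractITBatchIndexTest
          {
            @Test
            public void testIndexData() throws Exception
            {
              // ...
            }
          }

      and helpers are narrowed to package-private so the class-level group
      annotation does not turn them into tests:

          // public would make TestNG treat this as a test method
          void postEvents() throws Exception { /* ... */ }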
    
    pom.xml
    - Allow the enforcer plugin to be run from the command line (examples
      after this list)
    - Bump resources plugin version so that "[debug] execute contextualize"
      output is correctly suppressed by "mvn -q"
    - Bump exec plugin version, which renames its skip property from "skip"
      to "exec.skip"
    
    web-console/pom.xml
    - Add a property to allow disabling JavaScript-related work. Travis CI
      overrides this property to speed up the jobs (a sketch follows).
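
      A hypothetical sketch of the property (the name matches the
      -Ddruid.console.skip flag in MAVEN_SKIP above; the actual pom change
      is only summarized in the diffstat):

          <properties>
            <druid.console.skip>false</druid.console.skip>
          </properties>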
---
 .travis.yml                                        | 246 ++++++++++++---------
 build.sh                                           |  21 +-
 ci/travis_script_integration.sh                    |  26 ---
 ci/travis_script_integration_part2.sh              |  26 ---
 integration-tests/README.md                        |   5 -
 integration-tests/pom.xml                          |   1 +
 ...OverlordProxyAuthTest.java => TestNGGroup.java} |  33 ++-
 .../druid/tests/hadoop/ITHadoopIndexTest.java      |  14 +-
 .../tests/indexer/AbstractITBatchIndexTest.java    |   7 +-
 .../indexer/AbstractITRealtimeIndexTaskTest.java   |  30 +--
 .../tests/indexer/AbstractKafkaIndexerTest.java    |  72 +++---
 .../ITAppenderatorDriverRealtimeIndexTaskTest.java |  13 +-
 .../druid/tests/indexer/ITCompactionTaskTest.java  |  15 +-
 .../apache/druid/tests/indexer/ITIndexerTest.java  |  18 +-
 .../tests/indexer/ITKafkaIndexingServiceTest.java  |   5 +-
 .../ITKafkaIndexingServiceTransactionalTest.java   |   2 +
 .../tests/indexer/ITNestedQueryPushDownTest.java   |   5 +-
 .../druid/tests/indexer/ITParallelIndexTest.java   |  22 +-
 .../tests/indexer/ITRealtimeIndexTaskTest.java     |  13 +-
 .../indexer/ITSystemTableBatchIndexTaskTest.java   |  12 +-
 .../druid/tests/indexer/ITUnionQueryTest.java      |  17 +-
 .../druid/tests/query/ITSystemTableQueryTest.java  |   2 +
 .../druid/tests/query/ITTwitterQueryTest.java      |  13 +-
 .../druid/tests/query/ITWikipediaQueryTest.java    |   2 +
 .../security/ITBasicAuthConfigurationTest.java     |  33 +--
 .../ITCoordinatorOverlordProxyAuthTest.java        |   2 +
 .../org/apache/druid/tests/security/ITTLSTest.java |   2 +
 pom.xml                                            |   6 +-
 web-console/pom.xml                                |   8 +
 29 files changed, 315 insertions(+), 356 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index a8aca05..ebfa406 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -25,109 +25,141 @@ cache:
   directories:
     - $HOME/.m2
 
+env:
+  global:
+    - DOCKER_IP=127.0.0.1  # for integration tests
+    - MVN="mvn -B"
+    - > # Various options to make execution of maven goals faster (e.g., mvn install)
+      MAVEN_SKIP="
+      -Danimal.sniffer.skip=true
+      -Dcheckstyle.skip=true
+      -Ddruid.console.skip=true
+      -Denforcer.skip=true
+      -Dforbiddenapis.skip=true
+      -Dmaven.javadoc.skip=true
+      -Dpmd.skip=true
+      -Dspotbugs.skip=true
+      "
+
+# Add various options to make 'mvn install' fast and skip javascript compile (-Ddruid.console.skip=true) since it is not
+# needed. Use travis_retry to address intermittent connection timeouts when resolving the SIGAR dependency.
+install: MAVEN_OPTS='-Xmx3000m' travis_retry $MVN clean install -q -ff ${MAVEN_SKIP} -DskipTests -T1.0C
+
 matrix:
   include:
-      # Java 11 build
-    - jdk: openjdk11
-
-      # license checks
-    - env:
-       - NAME="license checks"
-      install: true
-      script: MAVEN_OPTS='-Xmx3000m' mvn clean verify -Prat -DskipTests -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -Drat.consoleOutput=true
-
-      # strict compilation
-    - env:
-        - NAME="strict compilation"
-      install: true
+    - name: "java 11 build"
+      jdk: openjdk11
+      script: $MVN test -pl '!web-console' ${MAVEN_SKIP}
+
+    - name: "animal sniffer checks"
+      script: $MVN animal-sniffer:check --fail-at-end
+
+    - name: "checkstyle"
+      script: $MVN checkstyle:checkstyle --fail-at-end
+
+    - name: "enforcer checks"
+      script: $MVN enforcer:enforce --fail-at-end
+
+    - name: "forbidden api checks"
+      script: $MVN forbiddenapis:check forbiddenapis:testCheck --fail-at-end
+
+    - name: "pmd checks"
+      script: $MVN pmd:check --fail-at-end  # TODO: consider adding pmd:cpd-check
+
+    - name: "spotbugs checks"
+      script: $MVN spotbugs:check --fail-at-end -pl '!benchmarks'
+
+    - name: "license checks"
+      install: skip
+      script: >
+        $MVN apache-rat:check -Prat --fail-at-end
+        -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
+        -Drat.consoleOutput=true
+
+    - name: "strict compilation"
+      install: skip
       # Strict compilation requires more than 2 GB
-      script: MAVEN_OPTS='-Xmx3000m' mvn clean -Pstrict -pl '!benchmarks' compile test-compile spotbugs:check -B --fail-at-end
+      script: >
+        MAVEN_OPTS='-Xmx3000m' $MVN clean -Pstrict compile test-compile --fail-at-end
+        -pl '!benchmarks' ${MAVEN_SKIP} -DskipTests
 
-    # packaging check
-    - env:
-        - NAME="packaging check"
-      before_install:
+    - name: "packaging check"
+      before_script:
         - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y
-        - pip3 install wheel # install wheel first explicitly
+        - pip3 install wheel  # install wheel first explicitly
         - pip3 install pyyaml
-      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
       script: ./build.sh
 
-      # processing module test
-    - env:
-        - NAME="processing module test"
-      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
-      before_script: unset _JAVA_OPTIONS
-      script:
-        # Set MAVEN_OPTS for Surefire launcher
-        - MAVEN_OPTS='-Xmx800m' mvn test -B -pl processing
+    - name: "processing module test"
+      env: &processing_env
+      - MAVEN_PROJECTS='processing'
+      before_script: &setup_java_test
+        - unset _JAVA_OPTIONS
+      script: &run_java_test
+        # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when
+        # resolving the SIGAR dependency.
+        - >
+          MAVEN_OPTS='-Xmx800m' $MVN test -pl ${MAVEN_PROJECTS}
+          ${MAVEN_SKIP} -Dremoteresources.skip=true
         - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
         - free -m
 
-      # processing module tests with SQL Compatibility enabled
-    - env:
-        - NAME="processing module test with SQL Compatibility"
-      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
-      before_script: unset _JAVA_OPTIONS
-      script:
-        # Set MAVEN_OPTS for Surefire launcher
-        - MAVEN_OPTS='-Xmx800m' mvn test -B -Ddruid.generic.useDefaultValueForNull=false -pl processing
+    - name: "processing module test (SQL Compatibility)"
+      env: *processing_env
+      before_script: *setup_java_test
+      script: &run_java_sql_compat_test
+        # Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when
+        # resolving the SIGAR dependency.
+        - >
+          MAVEN_OPTS='-Xmx800m' $MVN test -pl ${MAVEN_PROJECTS} -Ddruid.generic.useDefaultValueForNull=false
+          ${MAVEN_SKIP} -Dremoteresources.skip=true
         - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
         - free -m
 
-      # server module test
-    - env:
-        - NAME="server module test"
-      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
-      before_script: unset _JAVA_OPTIONS
-      script:
-        # Set MAVEN_OPTS for Surefire launcher
-        - MAVEN_OPTS='-Xmx800m' mvn test -B -pl server
-
-      # server module test with SQL Compatibility enabled
-    - env:
-        - NAME="server module test with SQL Compatibility enabled"
-      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
-      before_script: unset _JAVA_OPTIONS
+    - name: "indexing modules test"
+      env: &indexing_env
+      - MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service'
+      before_script: *setup_java_test
+      script: *run_java_test
+
+    - name: "indexing modules test (SQL Compatibility)"
+      env: *indexing_env
+      before_script: *setup_java_test
+      script: *run_java_sql_compat_test
+
+    - name: "server module test"
+      env: &server_env
+        - MAVEN_PROJECTS='server'
+      before_script: *setup_java_test
+      script: *run_java_test
+
+    - name: "server module test (SQL Compatibility)"
+      env: *server_env
+      before_script: *setup_java_test
+      script: *run_java_sql_compat_test
+
+    - name: "other modules test"
+      env: &other_env
+        - MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console'
+      before_script: *setup_java_test
+      script: *run_java_test
+
+    - name: "other modules test (SQL Compatibility)"
+      env: *other_env
+      before_script: *setup_java_test
+      script: *run_java_sql_compat_test
+
+    - name: "web console"
       script:
-        # Set MAVEN_OPTS for Surefire launcher
-        - MAVEN_OPTS='-Xmx800m' mvn test -B -pl server -Ddruid.generic.useDefaultValueForNull=false
+        - $MVN test -pl 'web-console'
 
-
-      # other modules test
-    - env:
-        - NAME="other modules test"
-      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
-      before_script: unset _JAVA_OPTIONS
-      script:
-        # Set MAVEN_OPTS for Surefire launcher
-        - MAVEN_OPTS='-Xmx800m' mvn test -B -pl '!processing,!server'
-        - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
-        - free -m
-
-      # other modules test with SQL Compatibility enabled
-    - env:
-        - NAME="other modules test with SQL Compatibility"
-      install: MAVEN_OPTS='-Xmx3000m' mvn install -q -ff -DskipTests -B
-      before_script: unset _JAVA_OPTIONS
-      script:
-        # Set MAVEN_OPTS for Surefire launcher
-        - MAVEN_OPTS='-Xmx800m' mvn test -B -Ddruid.generic.useDefaultValueForNull=false -pl '!processing,!server'
-        - sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
-        - free -m
-
-      # run integration tests
-    - services:
+    - name: "batch index integration test"
+      services: &integration_test_services
         - docker
-      env:
-        - NAME="integration test part 1"
-        - DOCKER_IP=127.0.0.1
-      install:
-        # Only errors will be shown with the -q option. This is to avoid generating too many logs which make travis build failed.
-        - mvn install -q -ff -DskipTests -B
-      script:
-        - $TRAVIS_BUILD_DIR/ci/travis_script_integration.sh
-      after_failure:
+      env: TESTNG_GROUPS='-Dgroups=batch-index'
+      script: &run_integration_test
+        - $MVN verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${MAVEN_SKIP}
+      after_failure: &integration_test_diags
         - for v in ~/shared/logs/*.log ; do
           echo $v logtail ======================== ; tail -100 $v ;
           done
@@ -136,22 +168,26 @@ matrix:
           docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
           done
 
-      # run integration tests
-    - services:
-        - docker
-      env:
-        - NAME="integration test part 2"
-        - DOCKER_IP=127.0.0.1
-      install:
-        # Only errors will be shown with the -q option. This is to avoid generating too many logs which make travis build failed.
-        - mvn install -q -ff -DskipTests -B
-      script:
-        - $TRAVIS_BUILD_DIR/ci/travis_script_integration_part2.sh
-      after_failure:
-        - for v in ~/shared/logs/*.log ; do
-          echo $v logtail ======================== ; tail -100 $v ;
-          done
-        - for v in broker middlemanager overlord router coordinator historical ; do
-          echo $v dmesg ======================== ;
-          docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
-          done
+    - name: "kafka index integration test"
+      services: *integration_test_services
+      env: TESTNG_GROUPS='-Dgroups=kafka-index'
+      script: *run_integration_test
+      after_failure: *integration_test_diags
+
+    - name: "query integration test"
+      services: *integration_test_services
+      env: TESTNG_GROUPS='-Dgroups=query'
+      script: *run_integration_test
+      after_failure: *integration_test_diags
+
+    - name: "realtime index integration test"
+      services: *integration_test_services
+      env: TESTNG_GROUPS='-Dgroups=realtime-index'
+      script: *run_integration_test
+      after_failure: *integration_test_diags
+
+    - name: "other integration test"
+      services: *integration_test_services
+      env: TESTNG_GROUPS='-DexcludedGroups=batch-index,kafka-index,query,realtime-index'
+      script: *run_integration_test
+      after_failure: *integration_test_diags
diff --git a/build.sh b/build.sh
index bf539b4..dd21512 100755
--- a/build.sh
+++ b/build.sh
@@ -18,10 +18,23 @@ MAVEN_OPTS='-Xmx3000m'
 
 mkdir -p target
 
-# Generate dependency reports and checks they are valid.
-docs/_bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer
+# Generate dependency reports and checks they are valid. When running on Travis CI, 2 cores are available
+# (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system).
+docs/_bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer --parallel 2
 
 docs/_bin/generate-license.py licenses/APACHE2 licenses.yaml LICENSES.BINARY --dependency-reports target/license-reports
 
-# Build binary distribution. Note that the below command internally runs 'docs/_bin/generate-license.py' without license check which overwrites LICENSES.BINARY file generated by the above command. This must be fine since both commands are supposed to generate the same contents.
-mvn -DskipTests -Dforbiddenapis.skip=true -Dcheckstyle.skip=true -Dpmd.skip=true -Dmaven.javadoc.skip=true -pl '!benchmarks' -B --fail-at-end install -Pdist -Pbundle-contrib-exts
+# Build binary distribution. Note that the below command internally runs 'docs/_bin/generate-license.py' without license
+# check which overwrites LICENSES.BINARY file generated by the above command. This must be fine since both commands are
+# supposed to generate the same contents.
+mvn -B --fail-at-end install -Pdist -Pbundle-contrib-exts \
+  -pl '!benchmarks' \
+  -Danimal.sniffer.skip=true \
+  -Dcheckstyle.skip=true \
+  -Denforcer.skip=true \
+  -Dforbiddenapis.skip=true \
+  -Dmaven.javadoc.skip=true \
+  -Dpmd.skip=true \
+  -DskipTests \
+  -Dspotbugs.skip=true \
+  -T1.0C
diff --git a/ci/travis_script_integration.sh b/ci/travis_script_integration.sh
deleted file mode 100755
index 99b6085..0000000
--- a/ci/travis_script_integration.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -e
-
-pushd $TRAVIS_BUILD_DIR/integration-tests
-
-mvn verify -P integration-tests -Dit.test=ITAppenderatorDriverRealtimeIndexTaskTest,ITCompactionTaskTest,ITIndexerTest,ITKafkaIndexingServiceTest,ITKafkaIndexingServiceTransactionalTest,ITParallelIndexTest,ITRealtimeIndexTaskTest
-
-popd
diff --git a/ci/travis_script_integration_part2.sh b/ci/travis_script_integration_part2.sh
deleted file mode 100755
index 8c9f5ee..0000000
--- a/ci/travis_script_integration_part2.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -e
-
-pushd $TRAVIS_BUILD_DIR/integration-tests
-
-mvn verify -P integration-tests -Dit.test=ITUnionQueryTest,ITNestedQueryPushDownTest,ITTwitterQueryTest,ITWikipediaQueryTest,ITBasicAuthConfigurationTest,ITTLSTest,ITSystemTableQueryTest,ITSystemTableBatchIndexTaskTest
-
-popd
diff --git a/integration-tests/README.md b/integration-tests/README.md
index 37435f0..1f7b4de 100644
--- a/integration-tests/README.md
+++ b/integration-tests/README.md
@@ -218,8 +218,3 @@ This will tell the test framework that the test class needs to be constructed us
 2) FromFileTestQueryHelper - reads queries with expected results from file and executes them and verifies the results using ResultVerifier
 
 Refer ITIndexerTest as an example on how to use dependency Injection
-
-### Register new tests for Travis CI
-
-Once you add new integration tests, don't forget to add them to `{DRUID_ROOT}/ci/travis_script_integration.sh`
-or `{DRUID_ROOT}/ci/travis_script_integration_part2.sh` for Travis CI to run them.
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml
index d9fd245..5450284 100644
--- a/integration-tests/pom.xml
+++ b/integration-tests/pom.xml
@@ -234,6 +234,7 @@
                                 -Ddruid.client.https.keyManagerPassword=druid123
                                 -Ddruid.client.https.keyStorePassword=druid123
                             </argLine>
+                            <reportFormat>plain</reportFormat>  <!-- show elapsed time for each test -->
                             <suiteXmlFiles>
                                 <suiteXmlFile>src/test/resources/testng.xml</suiteXmlFile>
                             </suiteXmlFiles>
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
similarity index 52%
copy from integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java
copy to integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
index 5ce4ede..dc37952 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
@@ -17,26 +17,19 @@
  * under the License.
  */
 
-package org.apache.druid.tests.security;
+package org.apache.druid.tests;
 
-import com.google.inject.Inject;
-import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
-import org.apache.druid.testing.guice.DruidTestModuleFactory;
-import org.jboss.netty.handler.codec.http.HttpResponseStatus;
-import org.testng.Assert;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITCoordinatorOverlordProxyAuthTest
+/**
+ * These groups are used by continuous integration to split the integration tests into multiple jobs. Any tests that
+ * are not annotated with a group will still be run by an "other" integration test continuous integration job.
+ */
+public class TestNGGroup
 {
-  @Inject
-  CoordinatorResourceTestClient coordinatorClient;
-  
-  @Test
-  public void testProxyAuth()
-  {
-    HttpResponseStatus responseStatus = coordinatorClient.getProxiedOverlordScalingResponseStatus();
-    Assert.assertEquals(HttpResponseStatus.OK, responseStatus);
-  }
+  public static final String BATCH_INDEX = "batch-index";
+  public static final String HADOOP_INDEX = "hadoop-index";
+  public static final String KAFKA_INDEX = "kafka-index";
+  public static final String OTHER_INDEX = "other-index";
+  public static final String QUERY = "query";
+  public static final String REALTIME_INDEX = "realtime-index";
+  public static final String SECURITY = "security";
 }
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java b/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java
index 4ed0bc2..f96b8fd 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java
@@ -25,14 +25,14 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testing.IntegrationTestingConfig;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.RetryUtil;
+import org.apache.druid.tests.TestNGGroup;
 import org.apache.druid.tests.indexer.AbstractIndexerTest;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
-import java.util.concurrent.Callable;
-
+@Test(groups = TestNGGroup.HADOOP_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITHadoopIndexTest extends AbstractIndexerTest
 {
@@ -77,15 +77,7 @@ public class ITHadoopIndexTest extends AbstractIndexerTest
       LOG.info("TaskID for loading index task %s", taskID);
       indexer.waitUntilTaskCompletes(taskID, 10000, 120);
       RetryUtil.retryUntil(
-          new Callable<Boolean>()
-          {
-            @Override
-            public Boolean call()
-            {
-              return coordinator.areSegmentsLoaded(BATCH_DATASOURCE);
-
-            }
-          },
+          () -> coordinator.areSegmentsLoaded(BATCH_DATASOURCE),
           true,
           20000,
           10,
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITBatchIndexTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITBatchIndexTest.java
index dd29458..4f9c0fc 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITBatchIndexTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITBatchIndexTest.java
@@ -36,9 +36,10 @@ import org.junit.Assert;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
-public class AbstractITBatchIndexTest extends AbstractIndexerTest
+public abstract class AbstractITBatchIndexTest extends AbstractIndexerTest
 {
   private static final Logger LOG = new Logger(AbstractITBatchIndexTest.class);
 
@@ -70,7 +71,7 @@ public class AbstractITBatchIndexTest extends AbstractIndexerTest
       String queryResponseTemplate;
       try {
         InputStream is = AbstractITBatchIndexTest.class.getResourceAsStream(queryFilePath);
-        queryResponseTemplate = IOUtils.toString(is, "UTF-8");
+        queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
       }
       catch (IOException e) {
         throw new ISE(e, "could not read query file: %s", queryFilePath);
@@ -117,7 +118,7 @@ public class AbstractITBatchIndexTest extends AbstractIndexerTest
       String queryResponseTemplate;
       try {
         InputStream is = AbstractITBatchIndexTest.class.getResourceAsStream(queryFilePath);
-        queryResponseTemplate = IOUtils.toString(is, "UTF-8");
+        queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
       }
       catch (IOException e) {
         throw new ISE(e, "could not read query file: %s", queryFilePath);
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java
index eefcba2..9f959ae 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java
@@ -36,7 +36,7 @@ import org.joda.time.format.DateTimeFormatter;
 
 import java.io.Closeable;
 import java.io.InputStream;
-import java.util.concurrent.Callable;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.TimeUnit;
 
 /**
@@ -57,14 +57,13 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes
   private static final String INDEX_DATASOURCE = "wikipedia_index_test";
 
   static final int DELAY_BETWEEN_EVENTS_SECS = 4;
-  String taskID;
   final String TIME_PLACEHOLDER = "YYYY-MM-DDTHH:MM:SS";
   // format for putting datestamp into events
-  final DateTimeFormatter EVENT_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss");
+  static final DateTimeFormatter EVENT_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss");
   // format for the querying interval
-  final DateTimeFormatter INTERVAL_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:'00Z'");
+  private static final DateTimeFormatter INTERVAL_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:'00Z'");
   // format for the expected timestamp in a query response
-  final DateTimeFormatter TIMESTAMP_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'.000Z'");
+  private static final DateTimeFormatter TIMESTAMP_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'.000Z'");
   DateTime dtFirst;            // timestamp of 1st event
   DateTime dtLast;             // timestamp of last event
   DateTime dtGroupBy;          // timestamp for expected response for groupBy query
@@ -84,8 +83,8 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes
   {
     fullDatasourceName = INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix();
 
-    LOG.info("Starting test: ITRealtimeIndexTaskTest");
-    try (final Closeable closeable = unloader(fullDatasourceName)) {
+    LOG.info("Starting test: %s", this.getClass().getSimpleName());
+    try (final Closeable ignored = unloader(fullDatasourceName)) {
       // the task will run for 3 minutes and then shutdown itself
       String task = setShutOffTime(
           getResourceAsString(getTaskResource()),
@@ -94,7 +93,7 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes
       task = StringUtils.replace(task, "%%DATASOURCE%%", fullDatasourceName);
 
       LOG.info("indexerSpec: [%s]\n", task);
-      taskID = indexer.submitTask(task);
+      String taskID = indexer.submitTask(task);
 
 
       // sleep for a while to let peons finish starting up
@@ -115,7 +114,7 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes
       if (null == is) {
         throw new ISE("could not open query file: %s", getQueriesResource());
       }
-      query_response_template = IOUtils.toString(is, "UTF-8");
+      query_response_template = IOUtils.toString(is, StandardCharsets.UTF_8);
 
       String queryStr = query_response_template;
       queryStr = StringUtils.replace(queryStr, "%%TIMEBOUNDARY_RESPONSE_TIMESTAMP%%", TIMESTAMP_FMT.print(dtFirst));
@@ -145,14 +144,7 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes
 
       // task should complete only after the segments are loaded by historical node
       RetryUtil.retryUntil(
-          new Callable<Boolean>()
-          {
-            @Override
-            public Boolean call()
-            {
-              return coordinator.areSegmentsLoaded(fullDatasourceName);
-            }
-          },
+          () -> coordinator.areSegmentsLoaded(fullDatasourceName),
           true,
           10000,
           60,
@@ -167,12 +159,12 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes
     }
   }
 
-  String setShutOffTime(String taskAsString, DateTime time)
+  private String setShutOffTime(String taskAsString, DateTime time)
   {
     return StringUtils.replace(taskAsString, "#SHUTOFFTIME", time.toString());
   }
 
-  String getRouterURL()
+  private String getRouterURL()
   {
     return StringUtils.format(
         "%s/druid/v2?pretty",
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractKafkaIndexerTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractKafkaIndexerTest.java
index e01c6e6..9be8f69 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractKafkaIndexerTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractKafkaIndexerTest.java
@@ -48,11 +48,11 @@ import org.joda.time.format.DateTimeFormatter;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.Properties;
-import java.util.concurrent.Callable;
 
-public class AbstractKafkaIndexerTest extends AbstractIndexerTest
+abstract class AbstractKafkaIndexerTest extends AbstractIndexerTest
 {
   private static final Logger LOG = new Logger(AbstractKafkaIndexerTest.class);
   private static final String INDEXER_FILE = "/indexer/kafka_supervisor_spec.json";
@@ -61,11 +61,11 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
 
   private static final int NUM_EVENTS_TO_SEND = 60;
   private static final long WAIT_TIME_MILLIS = 2 * 60 * 1000L;
-  public static final String testPropertyPrefix = "kafka.test.property.";
+  private static final String TEST_PROPERTY_PREFIX = "kafka.test.property.";
 
   // We'll fill in the current time and numbers for added, deleted and changed
   // before sending the event.
-  final String event_template =
+  private static final String EVENT_TEMPLATE =
       "{\"timestamp\": \"%s\"," +
       "\"page\": \"Gypsy Danger\"," +
       "\"language\" : \"en\"," +
@@ -83,17 +83,13 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
       "\"deleted\":%d," +
       "\"delta\":%d}";
 
-  private String supervisorId;
-  private ZkClient zkClient;
   private ZkUtils zkUtils;
   private boolean segmentsExist;   // to tell if we should remove segments during teardown
 
   // format for the querying interval
-  private final DateTimeFormatter INTERVAL_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:'00Z'");
+  private static final DateTimeFormatter INTERVAL_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:'00Z'");
   // format for the expected timestamp in a query response
-  private final DateTimeFormatter TIMESTAMP_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'.000Z'");
-  private DateTime dtFirst;                // timestamp of 1st event
-  private DateTime dtLast;                 // timestamp of last event
+  private static final DateTimeFormatter TIMESTAMP_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'.000Z'");
 
   @Inject
   private TestQueryHelper queryHelper;
@@ -110,7 +106,7 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
       int sessionTimeoutMs = 10000;
       int connectionTimeoutMs = 10000;
       String zkHosts = config.getZookeeperHosts();
-      zkClient = new ZkClient(zkHosts, sessionTimeoutMs, connectionTimeoutMs, ZKStringSerializer$.MODULE$);
+      ZkClient zkClient = new ZkClient(zkHosts, sessionTimeoutMs, connectionTimeoutMs, ZKStringSerializer$.MODULE$);
       zkUtils = new ZkUtils(zkClient, new ZkConnection(zkHosts, sessionTimeoutMs), false);
       if (config.manageKafkaTopic()) {
         int numPartitions = 4;
@@ -136,7 +132,7 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
       final Map<String, Object> consumerConfigs = KafkaConsumerConfigs.getConsumerProperties();
       final Properties consumerProperties = new Properties();
       consumerProperties.putAll(consumerConfigs);
-      consumerProperties.put("bootstrap.servers", config.getKafkaInternalHost());
+      consumerProperties.setProperty("bootstrap.servers", config.getKafkaInternalHost());
 
       spec = getResourceAsString(INDEXER_FILE);
       spec = StringUtils.replace(spec, "%%DATASOURCE%%", fullDatasourceName);
@@ -150,21 +146,21 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
     }
 
     // start supervisor
-    supervisorId = indexer.submitSupervisor(spec);
+    String supervisorId = indexer.submitSupervisor(spec);
     LOG.info("Submitted supervisor");
 
     // set up kafka producer
     Properties properties = new Properties();
     addFilteredProperties(config, properties);
-    properties.put("bootstrap.servers", config.getKafkaHost());
+    properties.setProperty("bootstrap.servers", config.getKafkaHost());
     LOG.info("Kafka bootstrap.servers: [%s]", config.getKafkaHost());
-    properties.put("acks", "all");
-    properties.put("retries", "3");
-    properties.put("key.serializer", ByteArraySerializer.class.getName());
-    properties.put("value.serializer", ByteArraySerializer.class.getName());
+    properties.setProperty("acks", "all");
+    properties.setProperty("retries", "3");
+    properties.setProperty("key.serializer", ByteArraySerializer.class.getName());
+    properties.setProperty("value.serializer", ByteArraySerializer.class.getName());
     if (txnEnabled) {
-      properties.put("enable.idempotence", "true");
-      properties.put("transactional.id", RandomIdUtils.getRandomId());
+      properties.setProperty("enable.idempotence", "true");
+      properties.setProperty("transactional.id", RandomIdUtils.getRandomId());
     }
 
     KafkaProducer<String, String> producer = new KafkaProducer<>(
@@ -178,8 +174,8 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
     DateTimeFormatter event_fmt = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
 
     DateTime dt = new DateTime(zone); // timestamp to put on events
-    dtFirst = dt;            // timestamp of 1st event
-    dtLast = dt;             // timestamp of last event
+    DateTime dtFirst = dt;            // timestamp of 1st event
+    DateTime dtLast = dt;             // timestamp of last event
 
     // these are used to compute the expected aggregations
     int added = 0;
@@ -194,11 +190,11 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
       num_events++;
       added += num_events;
       // construct the event to send
-      String event = StringUtils.format(event_template, event_fmt.print(dt), num_events, 0, num_events);
+      String event = StringUtils.format(EVENT_TEMPLATE, event_fmt.print(dt), num_events, 0, num_events);
       LOG.info("sending event: [%s]", event);
       try {
 
-        producer.send(new ProducerRecord<String, String>(TOPIC_NAME, event)).get();
+        producer.send(new ProducerRecord<>(TOPIC_NAME, event)).get();
 
       }
       catch (Exception ioe) {
@@ -230,7 +226,7 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
     // put the timestamps into the query structure
     String query_response_template;
     try {
-      query_response_template = IOUtils.toString(is, "UTF-8");
+      query_response_template = IOUtils.toString(is, StandardCharsets.UTF_8);
     }
     catch (IOException e) {
       throw new ISE(e, "could not read query file: %s", QUERIES_FILE);
@@ -261,28 +257,16 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
     // wait for all kafka indexing tasks to finish
     LOG.info("Waiting for all kafka indexing tasks to finish");
     RetryUtil.retryUntilTrue(
-        new Callable<Boolean>()
-        {
-          @Override
-          public Boolean call()
-          {
-            return (indexer.getPendingTasks().size() + indexer.getRunningTasks().size() + indexer.getWaitingTasks()
-                                                                                                 .size()) == 0;
-          }
-        }, "Waiting for Tasks Completion"
+        () -> (indexer.getPendingTasks().size()
+               + indexer.getRunningTasks().size()
+               + indexer.getWaitingTasks().size()) == 0,
+        "Waiting for Tasks Completion"
     );
 
     // wait for segments to be handed off
     try {
       RetryUtil.retryUntil(
-          new Callable<Boolean>()
-          {
-            @Override
-            public Boolean call()
-            {
-              return coordinator.areSegmentsLoaded(fullDatasourceName);
-            }
-          },
+          () -> coordinator.areSegmentsLoaded(fullDatasourceName),
           true,
           10000,
           30,
@@ -307,8 +291,8 @@ public class AbstractKafkaIndexerTest extends AbstractIndexerTest
   private void addFilteredProperties(IntegrationTestingConfig config, Properties properties)
   {
     for (Map.Entry<String, String> entry : config.getProperties().entrySet()) {
-      if (entry.getKey().startsWith(testPropertyPrefix)) {
-        properties.put(entry.getKey().substring(testPropertyPrefix.length()), entry.getValue());
+      if (entry.getKey().startsWith(TEST_PROPERTY_PREFIX)) {
+        properties.setProperty(entry.getKey().substring(TEST_PROPERTY_PREFIX.length()), entry.getValue());
       }
     }
   }
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java
index 5887c4c..ddfead7 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java
@@ -28,6 +28,7 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testing.clients.EventReceiverFirehoseTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.ServerDiscoveryUtil;
+import org.apache.druid.tests.TestNGGroup;
 import org.joda.time.DateTime;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
@@ -43,6 +44,7 @@ import java.util.Map;
 /**
  * See {@link AbstractITRealtimeIndexTaskTest} for test details.
  */
+@Test(groups = TestNGGroup.REALTIME_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
 {
@@ -57,11 +59,10 @@ public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtim
   }
 
   @Override
-  public void postEvents() throws Exception
+  void postEvents() throws Exception
   {
     final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME);
     eventReceiverSelector.start();
-    BufferedReader reader = null;
     InputStreamReader isr;
     try {
       isr = new InputStreamReader(
@@ -72,8 +73,7 @@ public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtim
     catch (Exception e) {
       throw new RuntimeException(e);
     }
-    try {
-      reader = new BufferedReader(isr);
+    try (BufferedReader reader = new BufferedReader(isr)) {
       ServerDiscoveryUtil.waitUntilInstanceReady(eventReceiverSelector, "Event Receiver");
       // Use the host from the config file and the port announced in zookeeper
       String host = config.getMiddleManagerHost() + ":" + eventReceiverSelector.pick().getPort();
@@ -102,7 +102,7 @@ public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtim
         }
         String event = StringUtils.replace(line, TIME_PLACEHOLDER, EVENT_FMT.print(dt));
         LOG.info("sending event: [%s]\n", event);
-        Collection<Map<String, Object>> events = new ArrayList<Map<String, Object>>();
+        Collection<Map<String, Object>> events = new ArrayList<>();
         events.add(this.jsonMapper.readValue(event, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT));
         int eventsPosted = client.postEvents(events, this.jsonMapper, MediaType.APPLICATION_JSON);
         if (eventsPosted != events.size()) {
@@ -112,7 +112,7 @@ public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtim
         try {
           Thread.sleep(DELAY_BETWEEN_EVENTS_SECS * 1000);
         }
-        catch (InterruptedException ex) {
+        catch (InterruptedException ignored) {
           /* nothing */
         }
         dtLast = dt; // latest timestamp
@@ -124,7 +124,6 @@ public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtim
       throw new RuntimeException(e);
     }
     finally {
-      reader.close();
       eventReceiverSelector.stop();
     }
   }
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java
index c13147b..477dd78 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java
@@ -27,6 +27,7 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testing.IntegrationTestingConfig;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.RetryUtil;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.BeforeSuite;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
@@ -34,16 +35,18 @@ import org.testng.annotations.Test;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
+@Test(groups = TestNGGroup.OTHER_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITCompactionTaskTest extends AbstractIndexerTest
 {
   private static final Logger LOG = new Logger(ITCompactionTaskTest.class);
-  private static String INDEX_TASK = "/indexer/wikipedia_index_task.json";
-  private static String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
-  private static String INDEX_DATASOURCE = "wikipedia_index_test";
-  private static String COMPACTION_TASK = "/indexer/wikipedia_compaction_task.json";
+  private static final String INDEX_TASK = "/indexer/wikipedia_index_task.json";
+  private static final String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
+  private static final String INDEX_DATASOURCE = "wikipedia_index_test";
+  private static final String COMPACTION_TASK = "/indexer/wikipedia_compaction_task.json";
 
   @Inject
   private IntegrationTestingConfig config;
@@ -62,11 +65,11 @@ public class ITCompactionTaskTest extends AbstractIndexerTest
     loadData();
     final List<String> intervalsBeforeCompaction = coordinator.getSegmentIntervals(fullDatasourceName);
     intervalsBeforeCompaction.sort(null);
-    try (final Closeable closeable = unloader(fullDatasourceName)) {
+    try (final Closeable ignored = unloader(fullDatasourceName)) {
       String queryResponseTemplate;
       try {
         InputStream is = AbstractITBatchIndexTest.class.getResourceAsStream(INDEX_QUERIES_RESOURCE);
-        queryResponseTemplate = IOUtils.toString(is, "UTF-8");
+        queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
       }
       catch (IOException e) {
         throw new ISE(e, "could not read query file: %s", INDEX_QUERIES_RESOURCE);
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITIndexerTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITIndexerTest.java
index adc0f88..56afbca 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITIndexerTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITIndexerTest.java
@@ -20,28 +20,30 @@
 package org.apache.druid.tests.indexer;
 
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import java.io.Closeable;
 
+@Test(groups = TestNGGroup.BATCH_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITIndexerTest extends AbstractITBatchIndexTest
 {
-  private static String INDEX_TASK = "/indexer/wikipedia_index_task.json";
-  private static String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
-  private static String INDEX_DATASOURCE = "wikipedia_index_test";
+  private static final String INDEX_TASK = "/indexer/wikipedia_index_task.json";
+  private static final String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
+  private static final String INDEX_DATASOURCE = "wikipedia_index_test";
 
-  private static String REINDEX_TASK = "/indexer/wikipedia_reindex_task.json";
-  private static String REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_reindex_queries.json";
-  private static String REINDEX_DATASOURCE = "wikipedia_reindex_test";
+  private static final String REINDEX_TASK = "/indexer/wikipedia_reindex_task.json";
+  private static final String REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_reindex_queries.json";
+  private static final String REINDEX_DATASOURCE = "wikipedia_reindex_test";
 
   @Test
   public void testIndexData() throws Exception
   {
     try (
-        final Closeable indexCloseable = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
-        final Closeable reindexCloseable = unloader(REINDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+        final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+        final Closeable ignored2 = unloader(REINDEX_DATASOURCE + config.getExtraDatasourceNameSuffix())
     ) {
       doIndexTestTest(
           INDEX_DATASOURCE,
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTest.java
index e9cc84e..bf74eea 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTest.java
@@ -21,13 +21,12 @@ package org.apache.druid.tests.indexer;
 
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
-/**
- * This is a test for the Kafka indexing service.
- */
+@Test(groups = TestNGGroup.KAFKA_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITKafkaIndexingServiceTest extends AbstractKafkaIndexerTest
 {
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTransactionalTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTransactionalTest.java
index 07bcae5..7424561 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTransactionalTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaIndexingServiceTransactionalTest.java
@@ -21,6 +21,7 @@ package org.apache.druid.tests.indexer;
 
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
@@ -28,6 +29,7 @@ import org.testng.annotations.Test;
 /**
  * This is a test for the Kafka indexing service with transactional topics
  */
+@Test(groups = TestNGGroup.KAFKA_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITKafkaIndexingServiceTransactionalTest extends AbstractKafkaIndexerTest
 {
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITNestedQueryPushDownTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITNestedQueryPushDownTest.java
index 0763089..853b1f0 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITNestedQueryPushDownTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITNestedQueryPushDownTest.java
@@ -30,13 +30,16 @@ import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.RetryUtil;
 import org.apache.druid.testing.utils.TestQueryHelper;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.BeforeSuite;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 
+@Test(groups = TestNGGroup.QUERY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITNestedQueryPushDownTest extends AbstractIndexerTest
 {
@@ -74,7 +77,7 @@ public class ITNestedQueryPushDownTest extends AbstractIndexerTest
       String queryResponseTemplate;
       try {
         InputStream is = AbstractITBatchIndexTest.class.getResourceAsStream(WIKITICKER_QUERIES_RESOURCE);
-        queryResponseTemplate = IOUtils.toString(is, "UTF-8");
+        queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
       }
       catch (IOException e) {
         throw new ISE(e, "could not read query file: %s", WIKITICKER_QUERIES_RESOURCE);
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITParallelIndexTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITParallelIndexTest.java
index b3920a1..7ed622c 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITParallelIndexTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITParallelIndexTest.java
@@ -20,28 +20,30 @@
 package org.apache.druid.tests.indexer;
 
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import java.io.Closeable;
 
+@Test(groups = TestNGGroup.BATCH_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITParallelIndexTest extends AbstractITBatchIndexTest
 {
-  private static String INDEX_TASK = "/indexer/wikipedia_parallel_index_task.json";
-  private static String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_parallel_index_queries.json";
-  private static String REINDEX_TASK = "/indexer/wikipedia_parallel_reindex_task.json";
-  private static String REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_parallel_reindex_queries.json";
-  private static String INDEX_DATASOURCE = "wikipedia_parallel_index_test";
-  private static String INDEX_INGEST_SEGMENT_DATASOURCE = "wikipedia_parallel_ingest_segment_index_test";
-  private static String INDEX_INGEST_SEGMENT_TASK = "/indexer/wikipedia_parallel_ingest_segment_index_task.json";
+  private static final String INDEX_TASK = "/indexer/wikipedia_parallel_index_task.json";
+  private static final String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_parallel_index_queries.json";
+  private static final String REINDEX_TASK = "/indexer/wikipedia_parallel_reindex_task.json";
+  private static final String REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_parallel_reindex_queries.json";
+  private static final String INDEX_DATASOURCE = "wikipedia_parallel_index_test";
+  private static final String INDEX_INGEST_SEGMENT_DATASOURCE = "wikipedia_parallel_ingest_segment_index_test";
+  private static final String INDEX_INGEST_SEGMENT_TASK = "/indexer/wikipedia_parallel_ingest_segment_index_task.json";
 
   @Test
   public void testIndexData() throws Exception
   {
-    try (final Closeable indexCloseable = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
-         final Closeable ingestSegmentCloseable = unloader(
-             INDEX_INGEST_SEGMENT_DATASOURCE + config.getExtraDatasourceNameSuffix());
+    try (final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+         final Closeable ignored2 = unloader(
+             INDEX_INGEST_SEGMENT_DATASOURCE + config.getExtraDatasourceNameSuffix())
     ) {
       doIndexTestTest(
           INDEX_DATASOURCE,
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java
index 0e52690..00a6618 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java
@@ -28,6 +28,7 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testing.clients.EventReceiverFirehoseTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.ServerDiscoveryUtil;
+import org.apache.druid.tests.TestNGGroup;
 import org.joda.time.DateTime;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
@@ -43,6 +44,7 @@ import java.util.Map;
 /**
  * See {@link AbstractITRealtimeIndexTaskTest} for test details.
  */
+@Test(groups = TestNGGroup.REALTIME_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
 {
@@ -69,11 +71,10 @@ public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
   }
 
   @Override
-  public void postEvents() throws Exception
+  void postEvents() throws Exception
   {
     final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME);
     eventReceiverSelector.start();
-    BufferedReader reader = null;
     InputStreamReader isr;
     try {
       isr = new InputStreamReader(
@@ -84,8 +85,7 @@ public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
     catch (Exception e) {
       throw new RuntimeException(e);
     }
-    try {
-      reader = new BufferedReader(isr);
+    try (BufferedReader reader = new BufferedReader(isr)) {
       ServerDiscoveryUtil.waitUntilInstanceReady(eventReceiverSelector, "Event Receiver");
       // Use the host from the config file and the port announced in zookeeper
       String host = config.getMiddleManagerHost() + ":" + eventReceiverSelector.pick().getPort();
@@ -113,7 +113,7 @@ public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
         }
         String event = StringUtils.replace(line, TIME_PLACEHOLDER, EVENT_FMT.print(dt));
         LOG.info("sending event: [%s]\n", event);
-        Collection<Map<String, Object>> events = new ArrayList<Map<String, Object>>();
+        Collection<Map<String, Object>> events = new ArrayList<>();
         events.add(this.jsonMapper.readValue(event, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT));
         int eventsPosted = client.postEvents(events, this.jsonMapper, MediaType.APPLICATION_JSON);
         if (eventsPosted != events.size()) {
@@ -123,7 +123,7 @@ public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
         try {
           Thread.sleep(DELAY_BETWEEN_EVENTS_SECS * 1000);
         }
-        catch (InterruptedException ex) {
+        catch (InterruptedException ignored) {
           /* nothing */
         }
         dtLast = dt;
@@ -135,7 +135,6 @@ public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
       throw new RuntimeException(e);
     }
     finally {
-      reader.close();
       eventReceiverSelector.stop();
     }
   }
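
The reduced visibility of postEvents above is not cosmetic: a class-level
@Test annotation makes TestNG treat every public method of the class as a
test method, so helpers must drop public to stay out of the test run. A
minimal, hypothetical illustration:

    import org.testng.annotations.Test;

    @Test(groups = "example")
    public class VisibilityExample
    {
      public void helperThatRunsByAccident()
      {
        // public + class-level @Test: TestNG executes this as a test
      }

      void helperThatIsIgnored()
      {
        // package-private: not considered a test method
      }
    }
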
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITSystemTableBatchIndexTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITSystemTableBatchIndexTaskTest.java
index c138c9f..df742e4 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITSystemTableBatchIndexTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITSystemTableBatchIndexTaskTest.java
@@ -21,26 +21,27 @@ package org.apache.druid.tests.indexer;
 
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import java.io.Closeable;
 
+@Test(groups = TestNGGroup.BATCH_INDEX)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITSystemTableBatchIndexTaskTest extends AbstractITBatchIndexTest
 {
-
   private static final Logger LOG = new Logger(ITSystemTableBatchIndexTaskTest.class);
-  private static String INDEX_TASK = "/indexer/wikipedia_index_task.json";
-  private static String SYSTEM_QUERIES_RESOURCE = "/indexer/sys_segment_batch_index_queries.json";
-  private static String INDEX_DATASOURCE = "wikipedia_index_test";
+  private static final String INDEX_TASK = "/indexer/wikipedia_index_task.json";
+  private static final String SYSTEM_QUERIES_RESOURCE = "/indexer/sys_segment_batch_index_queries.json";
+  private static final String INDEX_DATASOURCE = "wikipedia_index_test";
 
   @Test
   public void testIndexData() throws Exception
   {
     LOG.info("Starting batch index sys table queries");
     try (
-        final Closeable indexCloseable = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+        final Closeable ignored = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix())
     ) {
       doIndexTestSqlTest(
           INDEX_DATASOURCE,
@@ -49,5 +50,4 @@ public class ITSystemTableBatchIndexTaskTest extends AbstractITBatchIndexTest
       );
     }
   }
-
 }
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java
index ef577a7..4dd49b9 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java
@@ -38,6 +38,7 @@ import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.guice.TestClient;
 import org.apache.druid.testing.utils.RetryUtil;
 import org.apache.druid.testing.utils.ServerDiscoveryUtil;
+import org.apache.druid.tests.TestNGGroup;
 import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.jboss.netty.handler.codec.http.HttpResponseStatus;
 import org.joda.time.DateTime;
@@ -48,11 +49,12 @@ import org.testng.annotations.Test;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
+@Test(groups = TestNGGroup.QUERY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITUnionQueryTest extends AbstractIndexerTest
 {
@@ -134,7 +136,7 @@ public class ITUnionQueryTest extends AbstractIndexerTest
       String queryResponseTemplate;
       try {
         InputStream is = AbstractITBatchIndexTest.class.getResourceAsStream(UNION_QUERIES_RESOURCE);
-        queryResponseTemplate = IOUtils.toString(is, "UTF-8");
+        queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
       }
       catch (IOException e) {
         throw new ISE(e, "could not read query file: %s", UNION_QUERIES_RESOURCE);
@@ -156,14 +158,7 @@ public class ITUnionQueryTest extends AbstractIndexerTest
       for (int i = 0; i < numTasks; i++) {
         final int taskNum = i;
         RetryUtil.retryUntil(
-            new Callable<Boolean>()
-            {
-              @Override
-              public Boolean call()
-              {
-                return coordinator.areSegmentsLoaded(fullDatasourceName + taskNum);
-              }
-            },
+            () -> coordinator.areSegmentsLoaded(fullDatasourceName + taskNum),
             true,
             10000,
             10,
@@ -197,7 +192,7 @@ public class ITUnionQueryTest extends AbstractIndexerTest
     return StringUtils.replace(taskAsString, EVENT_RECEIVER_SERVICE_PREFIX, serviceName);
   }
 
-  public void postEvents(int id) throws Exception
+  private void postEvents(int id) throws Exception
   {
     final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_PREFIX + id);
     eventReceiverSelector.start();
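
The anonymous Callable above collapses to a lambda because Callable<Boolean>
has a single abstract method. The two forms are interchangeable; roughly
(segmentsLoaded() is a hypothetical stand-in for the real coordinator call):

    import java.util.concurrent.Callable;

    Callable<Boolean> verbose = new Callable<Boolean>()
    {
      @Override
      public Boolean call()
      {
        return segmentsLoaded();
      }
    };

    Callable<Boolean> concise = () -> segmentsLoaded();
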
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSystemTableQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITSystemTableQueryTest.java
index 82089f4..83850b3 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSystemTableQueryTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/query/ITSystemTableQueryTest.java
@@ -25,10 +25,12 @@ import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.RetryUtil;
 import org.apache.druid.testing.utils.SqlTestQueryHelper;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
+@Test(groups = TestNGGroup.QUERY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITSystemTableQueryTest
 {
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITTwitterQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITTwitterQueryTest.java
index d43378e..ad47348 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITTwitterQueryTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/query/ITTwitterQueryTest.java
@@ -24,12 +24,12 @@ import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.RetryUtil;
 import org.apache.druid.testing.utils.TestQueryHelper;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
-import java.util.concurrent.Callable;
-
+@Test(groups = TestNGGroup.QUERY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITTwitterQueryTest
 {
@@ -45,14 +45,7 @@ public class ITTwitterQueryTest
   {
     // ensure that the twitter segments are loaded completely
     RetryUtil.retryUntilTrue(
-        new Callable<Boolean>()
-        {
-          @Override
-          public Boolean call()
-          {
-            return coordinatorClient.areSegmentsLoaded(TWITTER_DATA_SOURCE);
-          }
-        }, "twitter segment load"
+        () -> coordinatorClient.areSegmentsLoaded(TWITTER_DATA_SOURCE), "twitter segment load"
     );
   }
 
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java
index fa1658c..f0af551 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java
@@ -24,10 +24,12 @@ import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.RetryUtil;
 import org.apache.druid.testing.utils.TestQueryHelper;
+import org.apache.druid.tests.TestNGGroup;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
+@Test(groups = TestNGGroup.QUERY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITWikipediaQueryTest
 {
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java
index 0c76339..cb33706 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java
@@ -47,6 +47,7 @@ import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.RetryUtil;
 import org.apache.druid.testing.utils.TestQueryHelper;
+import org.apache.druid.tests.TestNGGroup;
 import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.jboss.netty.handler.codec.http.HttpResponseStatus;
 import org.testng.Assert;
@@ -67,6 +68,7 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.stream.Collectors;
 
+@Test(groups = TestNGGroup.SECURITY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITBasicAuthConfigurationTest
 {
@@ -268,9 +270,7 @@ public class ITBasicAuthConfigurationTest
         datasourceOnlyUserClient,
         SYS_SCHEMA_SEGMENTS_QUERY,
         adminSegments.stream()
-                     .filter((segmentEntry) -> {
-                       return "auth_test".equals(segmentEntry.get("datasource"));
-                     })
+                     .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource")))
                      .collect(Collectors.toList())
     );
 
@@ -295,9 +295,7 @@ public class ITBasicAuthConfigurationTest
         datasourceOnlyUserClient,
         SYS_SCHEMA_TASKS_QUERY,
         adminTasks.stream()
-                     .filter((taskEntry) -> {
-                       return "auth_test".equals(taskEntry.get("datasource"));
-                     })
+                     .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource")))
                      .collect(Collectors.toList())
     );
 
@@ -307,9 +305,7 @@ public class ITBasicAuthConfigurationTest
         datasourceWithStateUserClient,
         SYS_SCHEMA_SEGMENTS_QUERY,
         adminSegments.stream()
-                     .filter((segmentEntry) -> {
-                       return "auth_test".equals(segmentEntry.get("datasource"));
-                     })
+                     .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource")))
                      .collect(Collectors.toList())
     );
 
@@ -325,9 +321,7 @@ public class ITBasicAuthConfigurationTest
         datasourceWithStateUserClient,
         SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
         adminServerSegments.stream()
-                           .filter((serverSegmentEntry) -> {
-                             return ((String) serverSegmentEntry.get("segment_id")).contains("auth_test");
-                           })
+                           .filter((serverSegmentEntry) -> ((String) serverSegmentEntry.get("segment_id")).contains("auth_test"))
                            .collect(Collectors.toList())
     );
 
@@ -336,9 +330,7 @@ public class ITBasicAuthConfigurationTest
         datasourceWithStateUserClient,
         SYS_SCHEMA_TASKS_QUERY,
         adminTasks.stream()
-                     .filter((taskEntry) -> {
-                       return "auth_test".equals(taskEntry.get("datasource"));
-                     })
+                     .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource")))
                      .collect(Collectors.toList())
     );
 
@@ -502,8 +494,8 @@ public class ITBasicAuthConfigurationTest
     LOG.info("URL: " + url);
     try {
       Properties connectionProperties = new Properties();
-      connectionProperties.put("user", "admin");
-      connectionProperties.put("password", "priest");
+      connectionProperties.setProperty("user", "admin");
+      connectionProperties.setProperty("password", "priest");
       Connection connection = DriverManager.getConnection(url, connectionProperties);
       Statement statement = connection.createStatement();
       statement.setMaxRows(450);
@@ -523,8 +515,8 @@ public class ITBasicAuthConfigurationTest
     LOG.info("URL: " + url);
     try {
       Properties connectionProperties = new Properties();
-      connectionProperties.put("user", "admin");
-      connectionProperties.put("password", "wrongpassword");
+      connectionProperties.setProperty("user", "admin");
+      connectionProperties.setProperty("password", "wrongpassword");
       Connection connection = DriverManager.getConnection(url, connectionProperties);
       Statement statement = connection.createStatement();
       statement.setMaxRows(450);
@@ -793,8 +785,7 @@ public class ITBasicAuthConfigurationTest
     return Lists.transform(
         servers,
         (server) -> {
-          Map<String, Object> newServer = new HashMap<>();
-          newServer.putAll(server);
+          Map<String, Object> newServer = new HashMap<>(server);
           newServer.put("curr_size", 0);
           return newServer;
         }
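
Two small idioms in this file are worth noting. The put-to-setProperty
switch makes the String-only contract of java.util.Properties explicit;
behavior is unchanged here since both values are already Strings, but a raw
Map put with a non-String value is silently invisible to getProperty. A
standalone illustration:

    import java.util.Properties;

    Properties props = new Properties();
    props.put("port", 8080);            // legal, but the value is an Integer
    props.setProperty("user", "admin"); // String-only API
    props.getProperty("port");          // null: non-String values are skipped
    props.getProperty("user");          // "admin"

Likewise, new HashMap<>(server) replaces the create-then-putAll pair with
the copy constructor, which does the same thing in one step.
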
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java
index 5ce4ede..f95ff5c 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java
@@ -22,11 +22,13 @@ package org.apache.druid.tests.security;
 import com.google.inject.Inject;
 import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.tests.TestNGGroup;
 import org.jboss.netty.handler.codec.http.HttpResponseStatus;
 import org.testng.Assert;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
+@Test(groups = TestNGGroup.SECURITY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITCoordinatorOverlordProxyAuthTest
 {
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITTLSTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITTLSTest.java
index e7bc731..93b4879 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITTLSTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITTLSTest.java
@@ -42,6 +42,7 @@ import org.apache.druid.server.security.TLSUtils;
 import org.apache.druid.testing.IntegrationTestingConfig;
 import org.apache.druid.testing.guice.DruidTestModuleFactory;
 import org.apache.druid.testing.utils.ITTLSCertificateChecker;
+import org.apache.druid.tests.TestNGGroup;
 import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.jboss.netty.handler.codec.http.HttpResponseStatus;
 import org.joda.time.Duration;
@@ -55,6 +56,7 @@ import javax.ws.rs.core.MediaType;
 import java.io.IOException;
 import java.net.URL;
 
+@Test(groups = TestNGGroup.SECURITY)
 @Guice(moduleFactory = DruidTestModuleFactory.class)
 public class ITTLSTest
 {
diff --git a/pom.xml b/pom.xml
index dca5d24..f5271d7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1130,7 +1130,7 @@
                 <version>1.4.1</version>
                 <executions>
                     <execution>
-                        <id>enforce-banned-dependencies</id>
+                        <id>default-cli</id>
                         <goals>
                             <goal>enforce</goal>
                         </goals>
@@ -1296,7 +1296,7 @@
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-resources-plugin</artifactId>
-                    <version>2.5</version>
+                    <version>3.1.0</version>
                 </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
@@ -1331,7 +1331,7 @@
                 <plugin>
                     <groupId>org.codehaus.mojo</groupId>
                     <artifactId>exec-maven-plugin</artifactId>
-                    <version>1.2.1</version>
+                    <version>1.6.0</version>
                 </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
diff --git a/web-console/pom.xml b/web-console/pom.xml
index 8ee53b3..cec4036 100644
--- a/web-console/pom.xml
+++ b/web-console/pom.xml
@@ -33,6 +33,7 @@
 
   <properties>
     <resources.directory>${project.build.directory}/resources</resources.directory>
+    <druid.console.skip>false</druid.console.skip>  <!-- this property is overridden in Travis CI to skip the javascript-related work -->
   </properties>
 
   <build>
@@ -40,6 +41,9 @@
       <plugin>
         <groupId>com.github.eirslett</groupId>
         <artifactId>frontend-maven-plugin</artifactId>
+        <configuration>
+          <skip>${druid.console.skip}</skip>
+        </configuration>
         <executions>
           <execution>
             <id>install-node-and-npm</id>
@@ -100,6 +104,9 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
+        <configuration>
+          <skip>${druid.console.skip}</skip>
+        </configuration>
         <executions>
           <execution>
             <id>clean-console</id>
@@ -141,6 +148,7 @@
         <artifactId>maven-resources-plugin</artifactId>
         <configuration>
           <outputDirectory>${project.build.outputDirectory}/org/apache/druid/console</outputDirectory>
+          <skip>${druid.console.skip}</skip>
         </configuration>
       </plugin>
     </plugins>
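
With druid.console.skip wired into all three plugins above, the entire
node/npm toolchain can be bypassed from the command line by overriding the
property, e.g. mvn install -Ddruid.console.skip=true.
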

