Posted to commits@drill.apache.org by dz...@apache.org on 2022/07/20 17:13:43 UTC

[drill] branch 1.20 updated (da7f2da01c -> 162bc85105)

This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a change to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git


    from da7f2da01c DRILL-8227: JConnect and jTDS JDBC drivers do not implement Connection::getSchema
     new 00d51f461d Try and reduce the vm crash on fork
     new 65ca8ff649 DRILL-8255: Update Drill-Calcite version to include fix for CALCITE-4992 (#2591)
     new ae02cf5abb DRILL-8249: Parquet decoding error reading nation.dict.parquet from test framework (#2594)
     new 031e60ca7b DRILL-8256: Fix unit tests of Kerberos auth in RPC (#2592)
     new abce595278 DRILL-8257: Resolve Netty lib conflicts (#2593)
     new 94917849f9 DRILL-8182: File scan nodes not differentiated by format config (#2583)
     new d6ad3d9486 DRILL-8263: upgrade libpam4j due to CVE
     new 4ae61b7dfb Bump aws-java-sdk-s3 from 1.12.211 to 1.12.261 in /distribution
     new 5218a74fc8 DRILL-8264: remove xalan dependency
     new 162bc85105 DRILL-8266: Number narrowing issues (#2608)

The 10 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .github/workflows/ci.yml                           |  22 +-
 .travis.yml                                        |   2 +-
 ...{DruidStorageTest.java => EasyOutOfMemory.java} |   9 +-
 .../org/apache/drill/categories/package-info.java  |   2 +
 .../drill/exec/store/excel/TestExcelFormat.java    |  30 ++
 .../resources/excel/test_cross_sheet_join.xlsx     | Bin 0 -> 6426 bytes
 contrib/format-maprdb/pom.xml                      |  40 ---
 .../cassandra/schema/CassandraDynamicTable.java    |   3 +-
 .../drill/exec/store/druid/DruidScanSpec.java      |   8 +-
 .../elasticsearch/ElasticsearchStorageConfig.java  |  38 ++-
 .../schema/ElasticsearchDynamicTable.java          |   3 +-
 .../elasticsearch/ElasticComplexTypesTest.java     |   3 +-
 .../store/elasticsearch/ElasticInfoSchemaTest.java |   3 +-
 .../store/elasticsearch/ElasticSearchPlanTest.java |   3 +-
 .../elasticsearch/ElasticSearchQueryTest.java      |   3 +-
 contrib/storage-hbase/pom.xml                      |  16 -
 .../exec/store/hbase/AbstractHBaseDrillTable.java  |   3 +-
 .../drill/exec/store/hbase/HBaseScanSpec.java      |  20 +-
 .../drill/hbase/TestHBaseFilterPushDown.java       |  44 +--
 .../org/apache/drill/hbase/TestTableGenerator.java |   4 +-
 contrib/storage-hive/core/pom.xml                  |   8 -
 .../drill/exec/store/hive/HiveReadEntry.java       |  17 +-
 .../apache/drill/exec/store/http/HttpScanSpec.java |   8 +-
 .../drill/exec/store/kafka/KafkaScanSpec.java      |  13 +-
 .../apache/drill/exec/store/kudu/KuduScanSpec.java |  16 +-
 .../drill/exec/store/mongo/MongoScanSpec.java      |   8 +-
 .../exec/store/openTSDB/OpenTSDBScanSpec.java      |  15 +-
 .../drill/exec/store/splunk/SplunkScanSpec.java    |   8 +-
 .../apache/drill/exec/udfs/NetworkFunctions.java   |   4 +-
 distribution/pom.xml                               |   8 +-
 exec/java-exec/pom.xml                             |  62 +---
 .../base/AbstractGroupScanWithMetadata.java        |   2 +-
 .../physical/impl/common/HashTableTemplate.java    |   2 +-
 .../planner/FileSystemPartitionDescriptor.java     |  10 +-
 .../drill/exec/planner/logical/DrillTable.java     |  17 +-
 ...illConditions.java => DrillTableSelection.java} |  26 +-
 .../exec/planner/logical/DynamicDrillTable.java    |   6 +-
 ...TableScan.java => SelectionBasedTableScan.java} |  16 +-
 .../planner/logical/partition/PruneScanRule.java   |   5 +
 .../apache/drill/exec/record/RecordBatchSizer.java |   4 +-
 .../drill/exec/record/VectorInitializer.java       |   2 +-
 .../rpc/security/AuthenticatorProviderImpl.java    |   7 +-
 .../org/apache/drill/exec/ssl/SSLConfigServer.java |   4 +
 .../apache/drill/exec/store/dfs/FileSelection.java |   8 +-
 .../drill/exec/store/dfs/FormatSelection.java      |  13 +-
 .../drill/exec/store/dfs/easy/EasyGroupScan.java   |   8 +
 .../exec/store/ischema/InfoSchemaTableType.java    |   8 +-
 .../drill/exec/store/mock/MockStorageEngine.java   |  21 +-
 .../apache/drill/exec/store/mock/MockTableDef.java |  30 +-
 .../store/parquet/columnreaders/ColumnReader.java  |   2 +-
 .../columnreaders/NullableColumnReader.java        |   4 +-
 .../exec/store/plan/rel/PluginDrillTable.java      |   3 +-
 .../drill/exec/store/sys/StaticDrillTable.java     |   3 +-
 .../apache/drill/exec/store/sys/SystemTable.java   |   8 +-
 .../filereader/BufferedDirectBufInputStream.java   |  16 +-
 .../drill/exec/DrillSeparatePlanningTest.java      |   4 +-
 .../drill/exec/fn/impl/TestAggregateFunctions.java |   3 +-
 .../physical/impl/writer/TestParquetWriter.java    |   5 -
 .../exec/physical/impl/writer/TestWriter.java      |   2 +-
 .../drill/exec/rpc/data/TestBitBitKerberos.java    | 371 ++++++++-------------
 .../rpc/user/security/TestUserBitKerberos.java     |   3 +-
 .../security/TestUserBitKerberosEncryption.java    |   3 +-
 .../drill/exec/server/TestDrillbitResilience.java  |   2 +-
 .../rest/spnego/TestDrillSpnegoAuthenticator.java  |   4 +-
 .../rest/spnego/TestSpnegoAuthentication.java      |   2 +-
 .../exec/server/rest/spnego/TestSpnegoConfig.java  |   4 +-
 .../exec/store/enumerable/plan/EnumMockTable.java  |   3 +-
 .../java/org/apache/drill/test/ProfileParser.java  |  33 +-
 .../java/org/apache/drill/test/QueryBuilder.java   |  28 +-
 .../conv/conversionTestWithLogicalPlan.json        |  50 +--
 .../src/test/resources/scan_screen_logical.json    |  18 +-
 exec/jdbc/pom.xml                                  |   6 +-
 exec/rpc/pom.xml                                   |  20 --
 logical/pom.xml                                    |  10 +
 metastore/iceberg-metastore/pom.xml                |   4 -
 metastore/metastore-api/pom.xml                    |   8 -
 pom.xml                                            | 313 ++---------------
 77 files changed, 633 insertions(+), 911 deletions(-)
 copy common/src/test/java/org/apache/drill/categories/{DruidStorageTest.java => EasyOutOfMemory.java} (79%)
 create mode 100644 contrib/format-excel/src/test/resources/excel/test_cross_sheet_join.xlsx
 copy exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/{DrillConditions.java => DrillTableSelection.java} (52%)
 rename exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/{DirPrunedEnumerableTableScan.java => SelectionBasedTableScan.java} (82%)


[drill] 03/10: DRILL-8249: Parquet decoding error reading nation.dict.parquet from test framework (#2594)

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit ae02cf5abb5ff4d8153fd27eb59c4801630364dd
Author: James Turton <91...@users.noreply.github.com>
AuthorDate: Wed Jul 13 06:53:23 2022 +0200

    DRILL-8249: Parquet decoding error reading nation.dict.parquet from test framework (#2594)
    
    The usingDictionary flag may be updated by the Parquet page reader
    and become true even though the column chunk metadata indicates no
    use of a dictionary page.
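
A minimal, self-contained sketch of the guard this commit arrives at. The
enum and the VALUE_ENCODINGS set here are illustrative stand-ins (the real
definitions live elsewhere in ColumnReader and are not shown in the diff
below); only the shape of the check is taken from the commit.

    import java.util.Collections;
    import java.util.EnumSet;
    import java.util.Set;

    // Stand-in for ColumnReader: the page reader may flip usingDictionary
    // to true even when the column chunk metadata lists no dictionary page,
    // so the flag must be consulted before the metadata-based check.
    class DecodingGuard {
      enum Encoding { PLAIN, RLE_DICTIONARY }

      // Encodings whose values need a decode pass (illustrative).
      static final Set<Encoding> VALUE_ENCODINGS = EnumSet.of(Encoding.RLE_DICTIONARY);

      boolean usingDictionary;                                   // set by the page reader
      Set<Encoding> chunkEncodings = EnumSet.of(Encoding.PLAIN); // from chunk metadata

      boolean recordsRequireDecoding() {
        return usingDictionary
            || !Collections.disjoint(VALUE_ENCODINGS, chunkEncodings);
      }
    }
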
---
 .../src/main/java/org/apache/drill/exec/ssl/SSLConfigServer.java      | 4 ++++
 .../apache/drill/exec/store/parquet/columnreaders/ColumnReader.java   | 2 +-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfigServer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfigServer.java
index e184a91f49..e3a1ca6516 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfigServer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfigServer.java
@@ -306,6 +306,10 @@ public class SSLConfigServer extends SSLConfig {
 
   @Override
   public int getHandshakeTimeout() {
+    // A value of 0 is interpreted by Netty as "no timeout". This is hard coded
+    // here instead of being read from {@link ExecConstants.SSL_HANDSHAKE_TIMEOUT}
+    // because the SSL handshake timeout is managed from the client end only
+    // (see {@link SSLConfigClient}).
     return 0;
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
index 1bc62b18dc..3cd63aeea4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReader.java
@@ -223,7 +223,7 @@ public abstract class ColumnReader<V extends ValueVector> {
   }
 
   protected boolean recordsRequireDecoding() {
-    return !Collections.disjoint(VALUE_ENCODINGS, columnChunkMetaData.getEncodings());
+    return usingDictionary || !Collections.disjoint(VALUE_ENCODINGS, columnChunkMetaData.getEncodings());
   }
 
   protected boolean processPageData(int recordsToReadInThisPass) throws IOException {


[drill] 08/10: Bump aws-java-sdk-s3 from 1.12.211 to 1.12.261 in /distribution

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 4ae61b7dfbf494de44ed29fa43640672010b2522
Author: dependabot[bot] <49...@users.noreply.github.com>
AuthorDate: Tue Jul 19 19:56:34 2022 +0000

    Bump aws-java-sdk-s3 from 1.12.211 to 1.12.261 in /distribution
    
    Bumps [aws-java-sdk-s3](https://github.com/aws/aws-sdk-java) from 1.12.211 to 1.12.261.
    - [Release notes](https://github.com/aws/aws-sdk-java/releases)
    - [Changelog](https://github.com/aws/aws-sdk-java/blob/master/CHANGELOG.md)
    - [Commits](https://github.com/aws/aws-sdk-java/compare/1.12.211...1.12.261)
    
    ---
    updated-dependencies:
    - dependency-name: com.amazonaws:aws-java-sdk-s3
      dependency-type: direct:production
    ...
    
    Signed-off-by: dependabot[bot] <su...@github.com>
---
 distribution/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distribution/pom.xml b/distribution/pom.xml
index 0f4b4a48bc..e8b1bf722f 100644
--- a/distribution/pom.xml
+++ b/distribution/pom.xml
@@ -31,7 +31,7 @@
   <name>Drill : Packaging and Distribution Assembly</name>
 
   <properties>
-    <aws.java.sdk.version>1.12.211</aws.java.sdk.version>
+    <aws.java.sdk.version>1.12.261</aws.java.sdk.version>
     <oci.hdfs.version>3.3.1.0.3.6</oci.hdfs.version>
   </properties>
 


[drill] 02/10: DRILL-8255: Update Drill-Calcite version to include fix for CALCITE-4992 (#2591)

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 65ca8ff649a730ae7a3f504e7024d6526ece209b
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Tue Jul 12 07:23:43 2022 +0300

    DRILL-8255: Update Drill-Calcite version to include fix for CALCITE-4992 (#2591)
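
Besides the Calcite bump, the diff below threads a new pathPrefix argument
through the ElasticsearchStorageConfig constructor. Following the updated
tests, a call now looks like this (the host and prefix values here are
illustrative):

    ElasticsearchStorageConfig config = new ElasticsearchStorageConfig(
        Collections.singletonList("http://localhost:9200"),   // hosts
        null, null,                                           // username, password
        "/es",                                                // new pathPrefix argument
        PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
    config.setEnabled(true);
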
---
 .../elasticsearch/ElasticsearchStorageConfig.java  | 38 ++++++++++++++++++----
 .../elasticsearch/ElasticComplexTypesTest.java     |  3 +-
 .../store/elasticsearch/ElasticInfoSchemaTest.java |  3 +-
 .../store/elasticsearch/ElasticSearchPlanTest.java |  3 +-
 .../elasticsearch/ElasticSearchQueryTest.java      |  3 +-
 pom.xml                                            |  2 +-
 6 files changed, 41 insertions(+), 11 deletions(-)

diff --git a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/ElasticsearchStorageConfig.java b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/ElasticsearchStorageConfig.java
index 9e84635076..8d1f50da17 100644
--- a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/ElasticsearchStorageConfig.java
+++ b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/ElasticsearchStorageConfig.java
@@ -30,6 +30,7 @@ import org.apache.drill.common.logical.security.CredentialsProvider;
 import org.apache.drill.exec.store.security.UsernamePasswordCredentials;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -40,22 +41,41 @@ public class ElasticsearchStorageConfig extends AbstractSecuredStoragePluginConf
 
   private static final ObjectWriter OBJECT_WRITER = new ObjectMapper().writerFor(List.class);
 
+  private static final String HOSTS = "hosts";
+
+  private static final String PATH_PREFIX = "pathPrefix";
+
+  private static final String USERNAME = "username";
+
+  private static final String PASSWORD = "password";
+
+  public static final String CREDENTIALS_PROVIDER = "credentialsProvider";
+
+  private static final String EMPTY_STRING = "";
+
   private final List<String> hosts;
+  private final String pathPrefix;
 
   @JsonCreator
   public ElasticsearchStorageConfig(
-      @JsonProperty("hosts") List<String> hosts,
-      @JsonProperty("username") String username,
-      @JsonProperty("password") String password,
-      @JsonProperty("credentialsProvider") CredentialsProvider credentialsProvider) {
+      @JsonProperty(HOSTS) List<String> hosts,
+      @JsonProperty(USERNAME) String username,
+      @JsonProperty(PASSWORD) String password,
+      @JsonProperty(PATH_PREFIX) String pathPrefix,
+      @JsonProperty(CREDENTIALS_PROVIDER) CredentialsProvider credentialsProvider) {
     super(CredentialProviderUtils.getCredentialsProvider(username, password, credentialsProvider), credentialsProvider == null);
     this.hosts = hosts;
+    this.pathPrefix = pathPrefix;
   }
 
   public List<String> getHosts() {
     return hosts;
   }
 
+  public String getPathPrefix() {
+    return pathPrefix;
+  }
+
   public String getUsername() {
     if (directCredentials) {
       return getUsernamePasswordCredentials().getUsername();
@@ -78,10 +98,16 @@ public class ElasticsearchStorageConfig extends AbstractSecuredStoragePluginConf
   @JsonIgnore
   public Map<String, Object> toConfigMap()
       throws JsonProcessingException {
+    Map<String, String> credentials = new HashMap<>(credentialsProvider.getCredentials());
     ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
-    builder.put("hosts", OBJECT_WRITER.writeValueAsString(hosts));
+    builder.put(HOSTS, OBJECT_WRITER.writeValueAsString(hosts));
+    builder.put(PATH_PREFIX, pathPrefix != null ? pathPrefix : EMPTY_STRING);
+    builder.put(USERNAME, credentials.getOrDefault(USERNAME, EMPTY_STRING));
+    builder.put(PASSWORD, credentials.getOrDefault(PASSWORD, EMPTY_STRING));
 
-    builder.putAll(credentialsProvider.getCredentials());
+    credentials.remove(USERNAME);
+    credentials.remove(PASSWORD);
+    builder.putAll(credentials);
     return builder.build();
   }
 
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java
index 9f4f52dafa..f777d38481 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java
@@ -56,7 +56,8 @@ public class ElasticComplexTypesTest extends ClusterTest {
     startCluster(ClusterFixture.builder(dirTestWatcher));
 
     ElasticsearchStorageConfig config = new ElasticsearchStorageConfig(
-        Collections.singletonList(TestElasticsearchSuite.getAddress()), null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
+        Collections.singletonList(TestElasticsearchSuite.getAddress()),
+        null, null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
     config.setEnabled(true);
     cluster.defineStoragePlugin("elastic", config);
 
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java
index aa4dae47b9..4edd3ed77a 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java
@@ -51,7 +51,8 @@ public class ElasticInfoSchemaTest extends ClusterTest {
     startCluster(ClusterFixture.builder(dirTestWatcher));
 
     ElasticsearchStorageConfig config = new ElasticsearchStorageConfig(
-        Collections.singletonList(TestElasticsearchSuite.getAddress()), null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
+        Collections.singletonList(TestElasticsearchSuite.getAddress()),
+        null, null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
     config.setEnabled(true);
     cluster.defineStoragePlugin("elastic", config);
 
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java
index db81edf592..c654d3fa1a 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java
@@ -49,7 +49,8 @@ public class ElasticSearchPlanTest extends ClusterTest {
     startCluster(ClusterFixture.builder(dirTestWatcher));
 
     ElasticsearchStorageConfig config = new ElasticsearchStorageConfig(
-        Collections.singletonList(TestElasticsearchSuite.getAddress()), null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
+        Collections.singletonList(TestElasticsearchSuite.getAddress()),
+        null, null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
     config.setEnabled(true);
     cluster.defineStoragePlugin("elastic", config);
 
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java
index 374c449107..727e8314dc 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java
@@ -57,7 +57,8 @@ public class ElasticSearchQueryTest extends ClusterTest {
     startCluster(ClusterFixture.builder(dirTestWatcher));
 
     ElasticsearchStorageConfig config = new ElasticsearchStorageConfig(
-        Collections.singletonList(TestElasticsearchSuite.getAddress()), null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
+        Collections.singletonList(TestElasticsearchSuite.getAddress()),
+        null, null, null, PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
     config.setEnabled(true);
     cluster.defineStoragePlugin("elastic", config);
 
diff --git a/pom.xml b/pom.xml
index bc845a5ab5..fc4a343c91 100644
--- a/pom.xml
+++ b/pom.xml
@@ -60,7 +60,7 @@
       avoid_bad_dependencies plugin found in the file.
     -->
     <calcite.groupId>com.github.vvysotskyi.drill-calcite</calcite.groupId>
-    <calcite.version>1.21.0-drill-r8</calcite.version>
+    <calcite.version>1.21.0-drill-r9</calcite.version>
     <avatica.version>1.17.0</avatica.version>
     <janino.version>3.0.11</janino.version>
     <sqlline.version>1.12.0</sqlline.version>


[drill] 07/10: DRILL-8263: upgrade libpam4j due to CVE

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit d6ad3d94861caee48fa4b768abc4672877633d3b
Author: PJ Fanning <pj...@users.noreply.github.com>
AuthorDate: Tue Jul 19 20:45:17 2022 +0100

    DRILL-8263: upgrade libpam4j due to CVE
---
 exec/java-exec/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 6cc5216a61..125edd30dd 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -29,7 +29,7 @@
   <name>Drill : Exec : Java Execution Engine</name>
 
   <properties>
-    <libpam4j.version>1.8-rev2</libpam4j.version>
+    <libpam4j.version>1.11</libpam4j.version>
     <aether.version>1.1.0</aether.version>
     <wagon.version>3.3.4</wagon.version>
     <okhttp.version>4.9.3</okhttp.version>


[drill] 04/10: DRILL-8256: Fix unit tests of Kerberos auth in RPC (#2592)

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 031e60ca7b11cbd4490f9357c3cc889f07772982
Author: James Turton <91...@users.noreply.github.com>
AuthorDate: Wed Jul 13 16:25:30 2022 +0200

    DRILL-8256: Fix unit tests of Kerberos auth in RPC (#2592)
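
The rewrite below moves TestBitBitKerberos off BaseTestQuery and its static
DrillConfig juggling onto the ClusterFixture/ClientFixture pattern. Reduced
to a sketch (the builder methods appear in the diff; the config values and
the query are illustrative, and the fragment belongs inside a test method):

    try (
      ClusterFixture cluster = ClusterFixture.bareBuilder(dirTestWatcher)
          .clusterSize(1)
          .configProperty(ExecConstants.BIT_AUTHENTICATION_ENABLED, true)
          .configProperty(ExecConstants.BIT_AUTHENTICATION_MECHANISM, "kerberos")
          .build();
      ClientFixture client = cluster.clientBuilder().build()
    ) {
      // Fixtures are AutoCloseable, so the drillbit and client are torn
      // down even when an assertion fails.
      client.queryBuilder().sql("SELECT 1").run();
    }
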
---
 .../rpc/security/AuthenticatorProviderImpl.java    |   7 +-
 .../drill/exec/rpc/data/TestBitBitKerberos.java    | 371 ++++++++-------------
 .../rpc/user/security/TestUserBitKerberos.java     |   3 +-
 .../security/TestUserBitKerberosEncryption.java    |   3 +-
 .../rest/spnego/TestDrillSpnegoAuthenticator.java  |   4 +-
 .../rest/spnego/TestSpnegoAuthentication.java      |   2 +-
 .../exec/server/rest/spnego/TestSpnegoConfig.java  |   4 +-
 .../java/org/apache/drill/test/QueryBuilder.java   |  28 +-
 8 files changed, 175 insertions(+), 247 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java
index 60ffdec1d6..178a185ead 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java
@@ -45,8 +45,11 @@ public class AuthenticatorProviderImpl implements AuthenticatorProvider {
 
   @SuppressWarnings("unchecked")
   public AuthenticatorProviderImpl(final DrillConfig config, final ScanResult scan) throws DrillbitStartupException {
-    // Skip auth mechanisms setup if user authentication is disabled
-    if (!config.getBoolean(ExecConstants.USER_AUTHENTICATION_ENABLED)) {
+    // Skip auth mechanisms setup if no authentication is enabled
+    if (
+      !config.getBoolean(ExecConstants.USER_AUTHENTICATION_ENABLED) &&
+      !config.getBoolean(ExecConstants.BIT_AUTHENTICATION_ENABLED)
+    ) {
       return;
     }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java
index 7571efb296..65b0351dce 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/data/TestBitBitKerberos.java
@@ -21,13 +21,15 @@ import org.apache.drill.exec.proto.BitData;
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigValueFactory;
+import org.apache.drill.test.BaseDirTestWatcher;
+import org.apache.drill.test.ClientFixture;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.apache.drill.test.ClusterTest;
+
 import io.netty.buffer.ByteBuf;
 import org.apache.drill.exec.ops.FragmentContextImpl;
-import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SecurityTest;
-import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.scanner.ClassPathScanner;
@@ -60,19 +62,14 @@ import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.work.WorkManager.WorkerBee;
 import org.apache.drill.exec.work.fragment.FragmentExecutor;
 import org.apache.drill.exec.work.fragment.FragmentManager;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
 
 import java.io.IOException;
-import java.lang.reflect.Field;
 import java.util.List;
-import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -81,52 +78,28 @@ import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
-@Ignore("See DRILL-5387")
 @Category(SecurityTest.class)
-public class TestBitBitKerberos extends BaseTestQuery {
+public class TestBitBitKerberos extends ClusterTest {
   private static KerberosHelper krbHelper;
-  private static DrillConfig newConfig;
 
   private int port = 1234;
 
   @BeforeClass
   public static void setupTest() throws Exception {
-
-    final Config config = DrillConfig.create(cloneDefaultTestConfigProperties());
-
     krbHelper = new KerberosHelper(TestBitBitKerberos.class.getSimpleName(), null);
-    krbHelper.setupKdc(dirTestWatcher.getTmpDir());
-
-    newConfig = new DrillConfig(
-        config.withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
-            ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
-            ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
-            ConfigValueFactory.fromAnyRef("kerberos"))
-        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
-            ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.SERVICE_PRINCIPAL,
-            ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
-        .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
-            ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())));
-
-    // Ignore the compile time warning caused by the code below.
-
-    // Config is statically initialized at this point. But the above configuration results in a different
-    // initialization which causes the tests to fail. So the following two changes are required.
-
-    // (1) Refresh Kerberos config.
-    // This disabled call to an unsupported internal API does not appear to be
-    // required and it prevents compiling with a target of JDK 8 on newer JDKs.
-    // sun.security.krb5.Config.refresh();
-
-    // (2) Reset the default realm.
-    final Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
-    defaultRealm.setAccessible(true);
-    defaultRealm.set(null, KerberosUtil.getDefaultRealm());
-
-    updateTestCluster(1, newConfig);
+    krbHelper.setupKdc(BaseDirTestWatcher.createTempDir(dirTestWatcher.getTmpDir()));
+    cluster = defaultClusterConfig().build();
+  }
+
+  private static ClusterFixtureBuilder defaultClusterConfig() {
+    return ClusterFixture.bareBuilder(dirTestWatcher)
+      .clusterSize(1)
+      .configNonStringProperty(ExecConstants.AUTHENTICATION_MECHANISMS, Lists.newArrayList("kerberos"))
+      .configProperty(ExecConstants.BIT_AUTHENTICATION_ENABLED, true)
+      .configProperty(ExecConstants.BIT_AUTHENTICATION_MECHANISM, "kerberos")
+      .configProperty(ExecConstants.USE_LOGIN_PRINCIPAL, true)
+      .configProperty(ExecConstants.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL)
+      .configProperty(ExecConstants.SERVICE_KEYTAB_LOCATION, krbHelper.serverKeytab.toString());
   }
 
   private FragmentManager setupFragmentContextAndManager(BufferAllocator allocator) {
@@ -188,23 +161,9 @@ public class TestBitBitKerberos extends BaseTestQuery {
     final WorkerBee bee = mock(WorkerBee.class);
     final WorkEventBus workBus = mock(WorkEventBus.class);
 
-    newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
-        .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
-          ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
-          ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
-          ConfigValueFactory.fromAnyRef("kerberos"))
-        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
-          ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.SERVICE_PRINCIPAL,
-          ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
-        .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
-          ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())));
-
-    final ScanResult result = ClassPathScanner.fromPrescan(newConfig);
+    final ScanResult result = ClassPathScanner.fromPrescan(cluster.config());
     final BootStrapContext c1 =
-      new BootStrapContext(newConfig, SystemOptionManager.createDefaultOptionDefinitions(), result);
+      new BootStrapContext(cluster.config(), SystemOptionManager.createDefaultOptionDefinitions(), result);
 
     final FragmentManager manager = setupFragmentContextAndManager(c1.getAllocator());
     when(workBus.getFragmentManager(Mockito.<FragmentHandle>any())).thenReturn(manager);
@@ -242,51 +201,42 @@ public class TestBitBitKerberos extends BaseTestQuery {
 
     final WorkerBee bee = mock(WorkerBee.class);
     final WorkEventBus workBus = mock(WorkEventBus.class);
-    newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
-      .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
-        ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
-      .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
-        ConfigValueFactory.fromAnyRef(true))
-      .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
-        ConfigValueFactory.fromAnyRef("kerberos"))
-      .withValue(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED,
-        ConfigValueFactory.fromAnyRef(true))
-      .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
-        ConfigValueFactory.fromAnyRef(true))
-      .withValue(ExecConstants.SERVICE_PRINCIPAL,
-        ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
-      .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
-        ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())));
-
-    final ScanResult result = ClassPathScanner.fromPrescan(newConfig);
-    final BootStrapContext c2 =
-      new BootStrapContext(newConfig, SystemOptionManager.createDefaultOptionDefinitions(), result);
-
-    final FragmentManager manager = setupFragmentContextAndManager(c2.getAllocator());
-    when(workBus.getFragmentManager(Mockito.<FragmentHandle>any())).thenReturn(manager);
-
-    final DataConnectionConfig config =
-      new DataConnectionConfig(c2.getAllocator(), c2, new DataServerRequestHandler(workBus, bee));
-    final DataServer server = new DataServer(config);
-
-    port = server.bind(port, true);
-    DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
-    final DataConnectionManager connectionManager = new DataConnectionManager(ep, config);
-    final DataTunnel tunnel = new DataTunnel(connectionManager);
-    AtomicLong max = new AtomicLong(0);
-    try {
-      for (int i = 0; i < 40; i++) {
-        long t1 = System.currentTimeMillis();
-        tunnel.sendRecordBatch(new TimingOutcome(max),
-          new FragmentWritableBatch(false, QueryId.getDefaultInstance(), 1, 1, 1, 1,
-            getRandomBatch(c2.getAllocator(), 5000)));
+    try (
+      ClusterFixture cluster = defaultClusterConfig()
+        .configProperty(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED, true)
+        .build();
+    ) {
+
+      final ScanResult result = ClassPathScanner.fromPrescan(cluster.config());
+      final BootStrapContext c2 =
+        new BootStrapContext(cluster.config(), SystemOptionManager.createDefaultOptionDefinitions(), result);
+
+      final FragmentManager manager = setupFragmentContextAndManager(c2.getAllocator());
+      when(workBus.getFragmentManager(Mockito.<FragmentHandle>any())).thenReturn(manager);
+
+      final DataConnectionConfig config =
+        new DataConnectionConfig(c2.getAllocator(), c2, new DataServerRequestHandler(workBus, bee));
+      final DataServer server = new DataServer(config);
+
+      port = server.bind(port, true);
+      DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
+      final DataConnectionManager connectionManager = new DataConnectionManager(ep, config);
+      final DataTunnel tunnel = new DataTunnel(connectionManager);
+      AtomicLong max = new AtomicLong(0);
+      try {
+        for (int i = 0; i < 40; i++) {
+          long t1 = System.currentTimeMillis();
+          tunnel.sendRecordBatch(new TimingOutcome(max),
+            new FragmentWritableBatch(false, QueryId.getDefaultInstance(), 1, 1, 1, 1,
+              getRandomBatch(c2.getAllocator(), 5000)));
+        }
+        assertTrue(max.get() > 2700);
+        Thread.sleep(5000);
+      } finally {
+        server.close();
+        connectionManager.close();
+        c2.close();
       }
-      assertTrue(max.get() > 2700);
-      Thread.sleep(5000);
-    } finally {
-      server.close();
-      connectionManager.close();
-      c2.close();
     }
   }
 
@@ -297,79 +247,58 @@ public class TestBitBitKerberos extends BaseTestQuery {
     final WorkerBee bee = mock(WorkerBee.class);
     final WorkEventBus workBus = mock(WorkEventBus.class);
 
-    newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
-      .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
-        ConfigValueFactory.fromIterable(Lists.newArrayList("kerberos")))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
-          ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
-          ConfigValueFactory.fromAnyRef("kerberos"))
-        .withValue(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED,
-          ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.BIT_ENCRYPTION_SASL_MAX_WRAPPED_SIZE,
-          ConfigValueFactory.fromAnyRef(100000))
-        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
-          ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.SERVICE_PRINCIPAL,
-          ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
-        .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
-          ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())));
-
-    final ScanResult result = ClassPathScanner.fromPrescan(newConfig);
-    final BootStrapContext c2 =
-      new BootStrapContext(newConfig, SystemOptionManager.createDefaultOptionDefinitions(), result);
-
-    final FragmentManager manager = setupFragmentContextAndManager(c2.getAllocator());
-    when(workBus.getFragmentManager(Mockito.<FragmentHandle>any())).thenReturn(manager);
+    try (
+      ClusterFixture cluster = defaultClusterConfig()
+        .configProperty(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED, true)
+        .configProperty(ExecConstants.BIT_ENCRYPTION_SASL_MAX_WRAPPED_SIZE, 100000)
+        .build();
+    ) {
 
-    final DataConnectionConfig config = new DataConnectionConfig(c2.getAllocator(), c2,
-      new DataServerRequestHandler(workBus, bee));
-    final DataServer server = new DataServer(config);
+      final ScanResult result = ClassPathScanner.fromPrescan(cluster.config());
+      final BootStrapContext c2 =
+        new BootStrapContext(cluster.config(), SystemOptionManager.createDefaultOptionDefinitions(), result);
 
-    port = server.bind(port, true);
-    final DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
-    final DataConnectionManager connectionManager = new DataConnectionManager(ep, config);
-    final DataTunnel tunnel = new DataTunnel(connectionManager);
-    AtomicLong max = new AtomicLong(0);
+      final FragmentManager manager = setupFragmentContextAndManager(c2.getAllocator());
+      when(workBus.getFragmentManager(Mockito.<FragmentHandle>any())).thenReturn(manager);
 
-    try {
-      for (int i = 0; i < 40; i++) {
-        long t1 = System.currentTimeMillis();
-        tunnel.sendRecordBatch(new TimingOutcome(max),
-          new FragmentWritableBatch(false, QueryId.getDefaultInstance(), 1, 1, 1, 1,
-            getRandomBatch(c2.getAllocator(), 5000)));
+      final DataConnectionConfig config = new DataConnectionConfig(c2.getAllocator(), c2,
+        new DataServerRequestHandler(workBus, bee));
+      final DataServer server = new DataServer(config);
+
+      port = server.bind(port, true);
+      final DrillbitEndpoint ep = DrillbitEndpoint.newBuilder().setAddress("localhost").setDataPort(port).build();
+      final DataConnectionManager connectionManager = new DataConnectionManager(ep, config);
+      final DataTunnel tunnel = new DataTunnel(connectionManager);
+      AtomicLong max = new AtomicLong(0);
+
+      try {
+        for (int i = 0; i < 40; i++) {
+          long t1 = System.currentTimeMillis();
+          tunnel.sendRecordBatch(new TimingOutcome(max),
+            new FragmentWritableBatch(false, QueryId.getDefaultInstance(), 1, 1, 1, 1,
+              getRandomBatch(c2.getAllocator(), 5000)));
+        }
+        assertTrue(max.get() > 2700);
+        Thread.sleep(5000);
+      } catch (Exception | AssertionError ex) {
+        fail();
+      } finally {
+        server.close();
+        connectionManager.close();
+        c2.close();
       }
-      assertTrue(max.get() > 2700);
-      Thread.sleep(5000);
-    } catch (Exception | AssertionError ex) {
-      fail();
-    } finally {
-      server.close();
-      connectionManager.close();
-      c2.close();
     }
   }
 
   @Test
   public void failureEncryptionOnlyPlainMechanism() throws Exception {
-    try{
-      newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
-        .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
-          ConfigValueFactory.fromIterable(Lists.newArrayList("plain")))
-          .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
-            ConfigValueFactory.fromAnyRef(true))
-          .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
-            ConfigValueFactory.fromAnyRef("kerberos"))
-          .withValue(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED,
-            ConfigValueFactory.fromAnyRef(true))
-          .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
-            ConfigValueFactory.fromAnyRef(true))
-          .withValue(ExecConstants.SERVICE_PRINCIPAL,
-            ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
-          .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
-            ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString())));
-
-      updateTestCluster(1, newConfig);
+    try {
+      defaultClusterConfig()
+        .configNonStringProperty(ExecConstants.AUTHENTICATION_MECHANISMS, Lists.newArrayList("plain"))
+        .configProperty(ExecConstants.BIT_ENCRYPTION_SASL_ENABLED, true)
+        .build()
+        .close();
+
       fail();
     } catch(Exception ex) {
       assertTrue(ex.getCause() instanceof DrillbitStartupException);
@@ -387,35 +316,25 @@ public class TestBitBitKerberos extends BaseTestQuery {
    */
   @Test
   public void localQuerySuccessWithWrongBitAuthConfig() throws Exception {
-
-    final Properties connectionProps = new Properties();
-    connectionProps.setProperty(DrillProperties.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL);
-    connectionProps.setProperty(DrillProperties.USER, krbHelper.CLIENT_PRINCIPAL);
-    connectionProps.setProperty(DrillProperties.KEYTAB, krbHelper.clientKeytab.getAbsolutePath());
-
-    newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
-        .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED,
-            ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
-            ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE))
-        .withValue(ExecConstants.SERVICE_PRINCIPAL,
-            ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
-        .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
-            ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString()))
-        .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
-            ConfigValueFactory.fromIterable(Lists.newArrayList("plain", "kerberos")))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
-            ConfigValueFactory.fromAnyRef(true))
-        .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
-            ConfigValueFactory.fromAnyRef("kerberos"))
-        .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
-            ConfigValueFactory.fromAnyRef(false)));
-
-    updateTestCluster(1, newConfig, connectionProps);
-
-    // Run a query using the new client
-    final String query = getFile("queries/tpch/01.sql");
-    test(query);
+    try (
+      ClusterFixture cluster = defaultClusterConfig()
+        .configProperty(ExecConstants.USER_AUTHENTICATION_ENABLED, true)
+        .configProperty(ExecConstants.USER_AUTHENTICATOR_IMPL, UserAuthenticatorTestImpl.TYPE)
+        .configNonStringProperty(
+          ExecConstants.AUTHENTICATION_MECHANISMS,
+          Lists.newArrayList("plain", "kerberos")
+        )
+        .configProperty(ExecConstants.USE_LOGIN_PRINCIPAL, false)
+        .build();
+      ClientFixture client = cluster.clientBuilder()
+      .property(DrillProperties.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL)
+      .property(DrillProperties.USER, krbHelper.CLIENT_PRINCIPAL)
+      .property(DrillProperties.KEYTAB, krbHelper.clientKeytab.getAbsolutePath())
+      .build()
+    ) {
+      // Run a query using the new client
+      client.queryBuilder().sqlResource("queries/tpch/01.sql").run();
+    }
   }
 
   /**
@@ -430,36 +349,28 @@ public class TestBitBitKerberos extends BaseTestQuery {
    */
   @Test
   public void queryFailureWithWrongBitAuthConfig() throws Exception {
-    try{
-      final Properties connectionProps = new Properties();
-      connectionProps.setProperty(DrillProperties.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL);
-      connectionProps.setProperty(DrillProperties.USER, krbHelper.CLIENT_PRINCIPAL);
-      connectionProps.setProperty(DrillProperties.KEYTAB, krbHelper.clientKeytab.getAbsolutePath());
-
-      newConfig = new DrillConfig(DrillConfig.create(cloneDefaultTestConfigProperties())
-          .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED,
-              ConfigValueFactory.fromAnyRef(true))
-          .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
-              ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE))
-          .withValue(ExecConstants.SERVICE_PRINCIPAL,
-              ConfigValueFactory.fromAnyRef(krbHelper.SERVER_PRINCIPAL))
-          .withValue(ExecConstants.SERVICE_KEYTAB_LOCATION,
-              ConfigValueFactory.fromAnyRef(krbHelper.serverKeytab.toString()))
-          .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
-              ConfigValueFactory.fromIterable(Lists.newArrayList("plain", "kerberos")))
-          .withValue(ExecConstants.BIT_AUTHENTICATION_ENABLED,
-              ConfigValueFactory.fromAnyRef(true))
-          .withValue(ExecConstants.BIT_AUTHENTICATION_MECHANISM,
-              ConfigValueFactory.fromAnyRef("kerberos"))
-          .withValue(ExecConstants.USE_LOGIN_PRINCIPAL,
-              ConfigValueFactory.fromAnyRef(false)));
-
-      updateTestCluster(2, newConfig, connectionProps);
-
-      test("alter session set `planner.slice_target` = 10");
-      final String query = getFile("queries/tpch/01.sql");
-      test(query);
-      fail();
+    try {
+      try (
+        ClusterFixture cluster = defaultClusterConfig()
+          .clusterSize(2)
+          .configProperty(ExecConstants.USER_AUTHENTICATION_ENABLED, true)
+          .configProperty(ExecConstants.USER_AUTHENTICATOR_IMPL, UserAuthenticatorTestImpl.TYPE)
+          .configNonStringProperty(
+            ExecConstants.AUTHENTICATION_MECHANISMS,
+            Lists.newArrayList("plain", "kerberos")
+          )
+          .configProperty(ExecConstants.USE_LOGIN_PRINCIPAL, false)
+          .build();
+        ClientFixture client = cluster.clientBuilder()
+        .property(DrillProperties.SERVICE_PRINCIPAL, krbHelper.SERVER_PRINCIPAL)
+        .property(DrillProperties.USER, krbHelper.CLIENT_PRINCIPAL)
+        .property(DrillProperties.KEYTAB, krbHelper.clientKeytab.getAbsolutePath())
+        .build()
+      ) {
+        client.alterSession("planner.slice_target", 10);
+        client.queryBuilder().sqlResource("queries/tpch/01.sql").run();
+        fail();
+      }
     } catch(Exception ex) {
       assertTrue(ex instanceof UserRemoteException);
       assertTrue(((UserRemoteException)ex).getErrorType() == UserBitShared.DrillPBError.ErrorType.CONNECTION);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java
index 755ae06174..0777ebdbe9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberos.java
@@ -27,6 +27,7 @@ import org.apache.drill.exec.rpc.data.DataRpcMetrics;
 import org.apache.drill.exec.rpc.security.KerberosHelper;
 import org.apache.drill.exec.rpc.user.UserRpcMetrics;
 import org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
@@ -51,7 +52,7 @@ public class TestUserBitKerberos extends ClusterTest {
   @BeforeClass
   public static void setupTest() throws Exception {
     krbHelper = new KerberosHelper(TestUserBitKerberos.class.getSimpleName(), null);
-    krbHelper.setupKdc(dirTestWatcher.getTmpDir());
+    krbHelper.setupKdc(BaseDirTestWatcher.createTempDir(dirTestWatcher.getTmpDir()));
     cluster = defaultClusterConfig().build();
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java
index b0449d50be..7324af4692 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestUserBitKerberosEncryption.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.rpc.user.security;
 
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
@@ -56,7 +57,7 @@ public class TestUserBitKerberosEncryption extends ClusterTest {
   @BeforeClass
   public static void setupTest() throws Exception {
     krbHelper = new KerberosHelper(TestUserBitKerberosEncryption.class.getSimpleName(), null);
-    krbHelper.setupKdc(dirTestWatcher.getTmpDir());
+    krbHelper.setupKdc(BaseDirTestWatcher.createTempDir(dirTestWatcher.getTmpDir()));
     cluster = defaultClusterConfig().build();
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
index 914688e9b2..ad93742dc3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
@@ -80,8 +80,8 @@ public class TestDrillSpnegoAuthenticator extends BaseTest {
 
   @BeforeClass
   public static void setupTest() throws Exception {
-    spnegoHelper = new KerberosHelper(TestSpnegoAuthentication.class.getSimpleName(), primaryName);
-    spnegoHelper.setupKdc(dirTestWatcher.getTmpDir());
+    spnegoHelper = new KerberosHelper(TestDrillSpnegoAuthenticator.class.getSimpleName(), primaryName);
+    spnegoHelper.setupKdc(BaseDirTestWatcher.createTempDir(dirTestWatcher.getTmpDir()));
 
     // (1) Refresh Kerberos config.
     // This disabled call to an unsupported internal API does not appear to be
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoAuthentication.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoAuthentication.java
index fb79f173a2..0d3ae96e0a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoAuthentication.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoAuthentication.java
@@ -76,7 +76,7 @@ public class TestSpnegoAuthentication extends BaseTest {
   @BeforeClass
   public static void setupTest() throws Exception {
     spnegoHelper = new KerberosHelper(TestSpnegoAuthentication.class.getSimpleName(), primaryName);
-    spnegoHelper.setupKdc(dirTestWatcher.getTmpDir());
+    spnegoHelper.setupKdc(BaseDirTestWatcher.createTempDir(dirTestWatcher.getTmpDir()));
 
     // (1) Refresh Kerberos config.
     // This disabled call to an unsupported internal API does not appear to be
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java
index a91f802319..baaac985dc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java
@@ -55,8 +55,8 @@ public class TestSpnegoConfig extends BaseTest {
 
   @BeforeClass
   public static void setupTest() throws Exception {
-    spnegoHelper = new KerberosHelper(TestSpnegoAuthentication.class.getSimpleName(), primaryName);
-    spnegoHelper.setupKdc(dirTestWatcher.getTmpDir());
+    spnegoHelper = new KerberosHelper(TestSpnegoConfig.class.getSimpleName(), primaryName);
+    spnegoHelper.setupKdc(BaseDirTestWatcher.createTempDir(dirTestWatcher.getTmpDir()));
 
     // (1) Refresh Kerberos config.
     // This disabled call to an unsupported internal API does not appear to be
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
index fb1fe1549c..fda76d6125 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java
@@ -281,19 +281,31 @@ public class QueryBuilder {
   }
 
   /**
-   * Run a query contained in a resource file.
+   * Parse a single SQL statement (with optional ending semi-colon) from
+   * the resource provided.
    *
-   * @param resource Name of the resource
+   * @param resource the resource containing exactly one SQL statement, with
+   * optional ending semi-colon
    * @return this builder
    */
-  public QueryBuilder sqlResource(String resource) {
-    sql(ClusterFixture.loadResource(resource));
-    return this;
+  public QueryBuilder sqlResource(String resource) throws IOException {
+    String script = ClusterFixture.loadResource(resource);
+    StatementParser parser = new StatementParser(script);
+    String sql = parser.parseNext();
+    if (sql == null) {
+      throw new IllegalArgumentException("No query found");
+    }
+    return sql(sql);
   }
 
-  public QueryBuilder sqlResource(String resource, Object... args) {
-    sql(ClusterFixture.loadResource(resource), args);
-    return this;
+  public QueryBuilder sqlResource(String resource, Object... args) throws IOException {
+    String script = ClusterFixture.loadResource(resource);
+    StatementParser parser = new StatementParser(script);
+    String sql = parser.parseNext();
+    if (sql == null) {
+      throw new IllegalArgumentException("No query found");
+    }
+    return sql(sql, args);
   }
 
   public QueryBuilder physicalResource(String resource) {
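
Call sites keep the same shape after this change; as in the Kerberos tests
above, a typical use is:

    // Runs the single SQL statement contained in the resource file.
    client.queryBuilder().sqlResource("queries/tpch/01.sql").run();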


[drill] 01/10: Try and reduce the vm crash on fork

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 00d51f461d7e1238c26027ad2218ba379583faea
Author: luocooong <lu...@apache.org>
AuthorDate: Sun Jul 3 20:02:40 2022 +0800

    Try and reduce the vm crash on fork
    
    Use the drop_caches and split unit tests
    
    Ready for the review
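
The new category is applied with JUnit's standard @Category annotation. A
sketch of how a test class opts in (the class name here is hypothetical):

    import org.apache.drill.categories.EasyOutOfMemory;
    import org.junit.experimental.categories.Category;

    // Classes tagged this way are excluded from the main Maven run and
    // executed afterwards in a fresh JVM (see the ci.yml change below).
    @Category(EasyOutOfMemory.class)
    public class TestSomethingMemoryHungry {
      // test methods as usual
    }
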
---
 .github/workflows/ci.yml                           | 22 ++++++++---------
 .travis.yml                                        |  2 +-
 .../apache/drill/categories/EasyOutOfMemory.java   | 28 ++++++++++++++++++++++
 .../org/apache/drill/categories/package-info.java  |  2 ++
 .../drill/exec/fn/impl/TestAggregateFunctions.java |  3 ++-
 .../physical/impl/writer/TestParquetWriter.java    |  5 ----
 .../drill/exec/server/TestDrillbitResilience.java  |  2 +-
 pom.xml                                            |  2 +-
 8 files changed, 45 insertions(+), 21 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 230fdb10c6..7988390440 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -46,22 +46,20 @@ jobs:
           key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
           restore-keys: |
               ${{ runner.os }}-maven-
-      # Caches MySQL directory used for JDBC storage plugin tests
-      - name: Cache MySQL
-        uses: actions/cache@v2
-        with:
-          path: ~/.embedmysql
-          key: ${{ runner.os }}-mysql
-      - name: Set up JDK ${{ matrix.java }}
-        uses: actions/setup-java@v2
-        with:
-          distribution: 'adopt'
-          java-version: ${{ matrix.java }}
       - name: Build and test
         # The total GitHub Actions memory is 7000Mb. But GitHub CI requires some memory for the container to perform tests
         run: |
           MAVEN_OPTS="-XX:+UseG1GC"
-          mvn install --batch-mode --no-transfer-progress -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 # -X -V for debugging
+          sudo sh -c 'echo 1 > /proc/sys/vm/drop_caches' && \
+          mvn install --batch-mode --no-transfer-progress \
+          -DexcludedGroups=org.apache.drill.categories.EasyOutOfMemory \
+          -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120
+      - name: Test Specific Categories # EasyOutOfMemory
+        run: |
+          sudo sh -c 'echo 1 > /proc/sys/vm/drop_caches' && \
+          mvn test -pl org.apache.drill.exec:drill-java-exec \
+          -Dgroups=org.apache.drill.categories.EasyOutOfMemory \
+          -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120
 
   checkstyle_protobuf:
     name: Run checkstyle and generate protobufs
diff --git a/.travis.yml b/.travis.yml
index 6461e8205d..6e576e1640 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -73,7 +73,7 @@ install:
   # For protobuf phase: builds Drill project, performs license checkstyle goal and regenerates Java and C++ Protobuf files
   - |
     if [ $PHASE = "tests" ]; then \
-      mvn install --batch-mode --no-transfer-progress -DforkCount=1 -DdirectMemoryMb=$DIRECTMEMORYMB \
+      mvn install --batch-mode --no-transfer-progress -DdirectMemoryMb=$DIRECTMEMORYMB \
         -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest,org.apache.drill.categories.SecurityTest"; \
     elif [ $PHASE = "build_checkstyle_protobuf" ]; then \
       MAVEN_OPTS="-Xms1G -Xmx1G" mvn install --no-transfer-progress -Drat.skip=false -Dlicense.skip=false --batch-mode -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -DskipTests=true -Dmaven.javadoc.skip=true -Dmaven.source.skip=true && \
diff --git a/common/src/test/java/org/apache/drill/categories/EasyOutOfMemory.java b/common/src/test/java/org/apache/drill/categories/EasyOutOfMemory.java
new file mode 100644
index 0000000000..2dd29456ab
--- /dev/null
+++ b/common/src/test/java/org/apache/drill/categories/EasyOutOfMemory.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.categories;
+
+/**
+ * Marks unit tests in the EasyOutOfMemory category.<br/>
+ * Note: if a test class is marked with this category,
+ * the CI will run it in a new, clean JVM.
+ *
+ */
+public interface EasyOutOfMemory extends FlakyTest {
+
+}
diff --git a/common/src/test/java/org/apache/drill/categories/package-info.java b/common/src/test/java/org/apache/drill/categories/package-info.java
index bb699c14ba..4f80760fa9 100644
--- a/common/src/test/java/org/apache/drill/categories/package-info.java
+++ b/common/src/test/java/org/apache/drill/categories/package-info.java
@@ -25,6 +25,8 @@
  *   <li>{@link org.apache.drill.categories.SlowTest}: These tests run slowly, and are disabled by default.</li>
  *   <li>{@link org.apache.drill.categories.UnlikelyTest}: These tests represent corner cases, specific bug fixes, or tests for pieces of code that are unlikely to be changed.
  *   These are disabled by default</li>
+ *   <li>{@link org.apache.drill.categories.FlakyTest}: A category for tests that intermittently fail.</li>
+ *   <li>{@link org.apache.drill.categories.EasyOutOfMemory}: Extends FlakyTest; the CI tool runs these tests in a new, clean JVM.</li>
  * </ul>
  * </p>
  * <p>
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
index 182c983583..11be3bc3c6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestAggregateFunctions.java
@@ -31,6 +31,7 @@ import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.util.Text;
+import org.apache.drill.categories.EasyOutOfMemory;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.categories.PlannerTest;
 import org.apache.drill.categories.SqlFunctionTest;
@@ -68,7 +69,7 @@ import static org.hamcrest.CoreMatchers.containsString;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-@Category({SqlFunctionTest.class, OperatorTest.class, PlannerTest.class})
+@Category({ SqlFunctionTest.class, OperatorTest.class, PlannerTest.class, EasyOutOfMemory.class })
 public class TestAggregateFunctions extends ClusterTest {
 
   @Rule
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
index 80ba94418b..28b009e869 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
@@ -52,9 +52,6 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import org.junit.jupiter.api.condition.DisabledOnOs;
-import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
-import org.junit.jupiter.api.condition.OS;
 
 import java.io.File;
 import java.io.FileWriter;
@@ -1003,8 +1000,6 @@ public class TestParquetWriter extends ClusterTest {
   // Only attempt this test on Linux / amd64 because com.rdblue.brotli-codec
   // only bundles natives for Mac and Linux on AMD64.  See PARQUET-1975.
   @Test
-  @EnabledIfSystemProperty(named = "os.arch", matches = "(amd64|x86_64)")
-  @DisabledOnOs({ OS.WINDOWS })
   public void testTPCHReadWriteBrotli() throws Exception {
     try {
       client.alterSession(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE, "brotli");
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java
index 564112e1fc..3f66fd67a5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/TestDrillbitResilience.java
@@ -117,7 +117,7 @@ public class TestDrillbitResilience extends ClusterTest {
   private static final int NUM_RUNS = 3;
   private static final int PROBLEMATIC_TEST_NUM_RUNS = 3;
   private static final int TIMEOUT = 15;
-  private final static Level CURRENT_LOG_LEVEL = Level.DEBUG;
+  private final static Level CURRENT_LOG_LEVEL = Level.INFO;
 
   /**
    * Note: Counting sys.memory executes a fragment on every drillbit. This is a better check in comparison to
diff --git a/pom.xml b/pom.xml
index 27385ae964..bc845a5ab5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -115,7 +115,7 @@
     <codemodel.version>2.6</codemodel.version>
     <joda.version>2.10.5</joda.version>
     <javax.el.version>3.0.0</javax.el.version>
-    <surefire.version>3.0.0-M5</surefire.version>
+    <surefire.version>3.0.0-M7</surefire.version>
     <jna.version>5.8.0</jna.version>
     <commons.compress.version>1.21</commons.compress.version>
     <hikari.version>4.0.3</hikari.version>
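
For context, the split above relies on JUnit 4 categories: a test class
opts in by naming the marker interface in its @Category annotation, and
Surefire's -Dgroups / -DexcludedGroups flags (see the ci.yml hunk) select
or skip those classes. A minimal sketch of the opt-in side; the test class
below is hypothetical and not part of this commit:

    import org.apache.drill.categories.EasyOutOfMemory;
    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    // Hypothetical example: a class tagged this way is skipped by the main
    // CI build (-DexcludedGroups=...EasyOutOfMemory) and picked up by the
    // follow-up "Test Specific Categories" step in a fresh JVM.
    @Category(EasyOutOfMemory.class)
    public class ExampleMemoryHungryTest {

      @Test
      public void testThatNeedsLotsOfDirectMemory() {
        // memory-intensive test body goes here
      }
    }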


[drill] 05/10: DRILL-8257: Resolve Netty lib conflicts (#2593)

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit abce5952784bf3aa5f743c9b263a51326a4e8fb6
Author: James Turton <91...@users.noreply.github.com>
AuthorDate: Wed Jul 13 06:55:10 2022 +0200

    DRILL-8257: Resolve Netty lib conflicts (#2593)
    
    Introduce Netty BOM and remove all Netty exclusions from POMs.
---
 contrib/format-maprdb/pom.xml       |  40 ------
 contrib/storage-hbase/pom.xml       |  16 ---
 contrib/storage-hive/core/pom.xml   |   8 --
 distribution/pom.xml                |   6 +-
 exec/java-exec/pom.xml              |  56 --------
 exec/rpc/pom.xml                    |  20 ---
 logical/pom.xml                     |  10 ++
 metastore/iceberg-metastore/pom.xml |   4 -
 metastore/metastore-api/pom.xml     |   8 --
 pom.xml                             | 268 +-----------------------------------
 10 files changed, 18 insertions(+), 418 deletions(-)

diff --git a/contrib/format-maprdb/pom.xml b/contrib/format-maprdb/pom.xml
index 1c09b45817..a2285ece89 100644
--- a/contrib/format-maprdb/pom.xml
+++ b/contrib/format-maprdb/pom.xml
@@ -43,14 +43,6 @@
         <artifactId>hbase-client</artifactId>
         <version>${mapr-format-plugin.hbase.version}</version>
         <exclusions>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
           <exclusion>
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
@@ -185,14 +177,6 @@
           <artifactId>log4j-over-slf4j</artifactId>
           <groupId>org.slf4j</groupId>
         </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -239,14 +223,6 @@
       <classifier>tests</classifier>
       <scope>test</scope>
       <exclusions>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -260,14 +236,6 @@
           <artifactId>log4j-over-slf4j</artifactId>
           <groupId>org.slf4j</groupId>
         </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -277,14 +245,6 @@
       <classifier>tests</classifier>
       <scope>test</scope>
       <exclusions>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
 
diff --git a/contrib/storage-hbase/pom.xml b/contrib/storage-hbase/pom.xml
index 8a9a5a7db1..53625d7462 100644
--- a/contrib/storage-hbase/pom.xml
+++ b/contrib/storage-hbase/pom.xml
@@ -177,14 +177,6 @@
               <groupId>org.apache.hadoop</groupId>
               <artifactId>hadoop-mapreduce-client-core</artifactId>
             </exclusion>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty-all</artifactId>
-            </exclusion>
             <exclusion>
               <groupId>log4j</groupId>
               <artifactId>log4j</artifactId>
@@ -257,14 +249,6 @@
           <groupId>org.apache.hbase</groupId>
           <artifactId>hbase-client</artifactId>
           <exclusions>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty-all</artifactId>
-            </exclusion>
             <exclusion>
               <groupId>log4j</groupId>
               <artifactId>log4j</artifactId>
diff --git a/contrib/storage-hive/core/pom.xml b/contrib/storage-hive/core/pom.xml
index c6c5d6b67b..de1ec20885 100644
--- a/contrib/storage-hive/core/pom.xml
+++ b/contrib/storage-hive/core/pom.xml
@@ -212,14 +212,6 @@
           <groupId>org.mortbay.jetty</groupId>
           <artifactId>servlet-api-2.5</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
diff --git a/distribution/pom.xml b/distribution/pom.xml
index 4bad4fa409..0f4b4a48bc 100644
--- a/distribution/pom.xml
+++ b/distribution/pom.xml
@@ -32,7 +32,7 @@
 
   <properties>
     <aws.java.sdk.version>1.12.211</aws.java.sdk.version>
-    <oci.hdfs.version>3.3.0.7.0.1</oci.hdfs.version>
+    <oci.hdfs.version>3.3.1.0.3.6</oci.hdfs.version>
   </properties>
 
   <dependencies>
@@ -190,10 +190,6 @@
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-client</artifactId>
       <exclusions>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index bcdf8c1d74..6cc5216a61 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -393,14 +393,6 @@
           <groupId>commons-codec</groupId>
           <artifactId>commons-codec</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -421,14 +413,6 @@
           <groupId>log4j</groupId>
           <artifactId>log4j</artifactId>
         </exclusion>
-        <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>org.eclipse.jetty</groupId>
           <artifactId>jetty-server</artifactId>
@@ -475,14 +459,6 @@
       <scope>test</scope>
       <classifier>tests</classifier>
       <exclusions>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>commons-codec</groupId>
           <artifactId>commons-codec</artifactId>
@@ -494,14 +470,6 @@
       <artifactId>hadoop-hdfs</artifactId>
       <scope>test</scope>
       <exclusions>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>commons-codec</groupId>
           <artifactId>commons-codec</artifactId>
@@ -540,14 +508,6 @@
           <groupId>org.xerial.snappy</groupId>
           <artifactId>snappy-java</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -680,14 +640,6 @@
               <groupId>log4j</groupId>
               <artifactId>log4j</artifactId>
             </exclusion>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty-all</artifactId>
-            </exclusion>
           </exclusions>
         </dependency>
       </dependencies>
@@ -747,14 +699,6 @@
               <groupId>log4j</groupId>
               <artifactId>log4j</artifactId>
             </exclusion>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty-all</artifactId>
-            </exclusion>
           </exclusions>
         </dependency>
       </dependencies>
diff --git a/exec/rpc/pom.xml b/exec/rpc/pom.xml
index 138b353ff8..f6e5c4415a 100644
--- a/exec/rpc/pom.xml
+++ b/exec/rpc/pom.xml
@@ -56,26 +56,6 @@
       <classifier>linux-x86_64</classifier>
       <version>${netty.version}</version>
       <exclusions>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-handler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-common</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-buffer</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-transport</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-parent</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
diff --git a/logical/pom.xml b/logical/pom.xml
index 7e8fb5776e..21a12a377c 100644
--- a/logical/pom.xml
+++ b/logical/pom.xml
@@ -85,6 +85,16 @@
         <artifactId>joda-time</artifactId>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-codec</groupId>
+          <artifactId>commons-codec</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
 
diff --git a/metastore/iceberg-metastore/pom.xml b/metastore/iceberg-metastore/pom.xml
index 706859355d..8630e147aa 100644
--- a/metastore/iceberg-metastore/pom.xml
+++ b/metastore/iceberg-metastore/pom.xml
@@ -113,10 +113,6 @@
           <groupId>org.apache.avro</groupId>
           <artifactId>avro</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>org.slf4j</groupId>
           <artifactId>slf4j-api</artifactId>
diff --git a/metastore/metastore-api/pom.xml b/metastore/metastore-api/pom.xml
index f61a95a937..4e8cde1b44 100644
--- a/metastore/metastore-api/pom.xml
+++ b/metastore/metastore-api/pom.xml
@@ -58,14 +58,6 @@
           <groupId>commons-codec</groupId>
           <artifactId>commons-codec</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty-all</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
   </dependencies>
diff --git a/pom.xml b/pom.xml
index fc4a343c91..f32c48bb9f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1406,14 +1406,6 @@
             <groupId>org.xerial.snappy</groupId>
             <artifactId>snappy-java</artifactId>
           </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1425,14 +1417,6 @@
             <groupId>org.mortbay.jetty</groupId>
             <artifactId>servlet-api</artifactId>
           </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1489,14 +1473,6 @@
             <groupId>org.apache.curator</groupId>
             <artifactId>apache-curator</artifactId>
           </exclusion>
-          <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
           <exclusion>
             <groupId>javax.servlet</groupId>
             <artifactId>servlet-api</artifactId>
@@ -1595,14 +1571,6 @@
             <groupId>org.apache.hive</groupId>
             <artifactId>hive-common</artifactId>
           </exclusion>
-          <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1682,10 +1650,6 @@
         <artifactId>mapr-hbase</artifactId>
         <version>${mapr.release.version}</version>
         <exclusions>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
           <exclusion>
             <artifactId>log4j</artifactId>
             <groupId>log4j</groupId>
@@ -1718,14 +1682,6 @@
             <groupId>org.mortbay.jetty</groupId>
             <artifactId>servlet-api-2.5</artifactId>
           </exclusion>
-          <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
           <exclusion>
             <groupId>org.json</groupId>
             <artifactId>json</artifactId>
@@ -1753,14 +1709,6 @@
             <artifactId>slf4j-log4j12</artifactId>
             <groupId>org.slf4j</groupId>
           </exclusion>
-          <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
           <exclusion>
             <groupId>org.json</groupId>
             <artifactId>json</artifactId>
@@ -1781,14 +1729,6 @@
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-common</artifactId>
           </exclusion>
-          <exclusion>
-              <groupId>io.netty</groupId>
-              <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1893,14 +1833,6 @@
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
           </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-transport-native-epoll</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1926,14 +1858,6 @@
             <artifactId>log4j</artifactId>
             <groupId>log4j</groupId>
           </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-all</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -2076,13 +2000,19 @@
           </exclusion>
         </exclusions>
       </dependency>
-
       <dependency>
         <groupId>junit</groupId>
         <artifactId>junit</artifactId>
         <version>${junit4.version}</version>
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <groupId>io.netty</groupId>
+        <artifactId>netty-bom</artifactId>
+        <version>${netty.version}</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -2239,10 +2169,6 @@
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-jaxrs</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -2390,10 +2316,6 @@
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-jaxrs</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>commons-httpclient</groupId>
                 <artifactId>commons-httpclient</artifactId>
@@ -2418,14 +2340,6 @@
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>log4j</groupId>
                 <artifactId>log4j</artifactId>
@@ -2462,14 +2376,6 @@
                 <groupId>com.sun.jersey</groupId>
                 <artifactId>jersey-core</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-core-asl</artifactId>
@@ -2486,10 +2392,6 @@
             <artifactId>hadoop-client</artifactId>
             <version>${hadoop.version}</version>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
@@ -2570,10 +2472,6 @@
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-jaxrs</artifactId>
               </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -2763,14 +2661,6 @@
             <artifactId>hbase-server</artifactId>
             <version>${hbase.version}</version>
             <exclusions>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>tomcat</groupId>
                 <artifactId>jasper-compiler</artifactId>
@@ -2840,14 +2730,6 @@
             <version>${hbase.version}</version>
             <scope>test</scope>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
@@ -3064,14 +2946,6 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-core</artifactId>
               </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>commons-httpclient</groupId>
                 <artifactId>commons-httpclient</artifactId>
@@ -3083,10 +2957,6 @@
             <artifactId>hadoop-client</artifactId>
             <version>${hadoop.version}</version>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <artifactId>commons-logging</artifactId>
                 <groupId>commons-logging</groupId>
@@ -3167,14 +3037,6 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-core</artifactId>
               </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -3303,14 +3165,6 @@
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-core-asl</artifactId>
@@ -3350,14 +3204,6 @@
             <artifactId>hbase-client</artifactId>
             <version>${hbase.version}</version>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
@@ -3409,14 +3255,6 @@
             <artifactId>hbase-server</artifactId>
             <version>${hbase.version}</version>
             <exclusions>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>tomcat</groupId>
                 <artifactId>jasper-compiler</artifactId>
@@ -3486,14 +3324,6 @@
             <version>${hbase.version}</version>
             <scope>test</scope>
             <exclusions>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
@@ -3612,14 +3442,6 @@
                 <groupId>org.mortbay.jetty</groupId>
                 <artifactId>servlet-api-2.5</artifactId>
               </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty-all</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -3643,14 +3465,6 @@
                 <groupId>org.mortbay.jetty</groupId>
                 <artifactId>servlet-api-2.5</artifactId>
               </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty-all</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -3748,14 +3562,6 @@
                 <groupId>org.mortbay.jetty</groupId>
                 <artifactId>servlet-api-2.5</artifactId>
               </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty-all</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -3779,14 +3585,6 @@
                 <groupId>org.mortbay.jetty</groupId>
                 <artifactId>servlet-api-2.5</artifactId>
               </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                  <groupId>io.netty</groupId>
-                  <artifactId>netty-all</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -4078,14 +3876,6 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-core</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>commons-httpclient</groupId>
                 <artifactId>commons-httpclient</artifactId>
@@ -4097,10 +3887,6 @@
             <artifactId>hadoop-client</artifactId>
             <version>${hadoop.version}</version>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <artifactId>commons-logging</artifactId>
                 <groupId>commons-logging</groupId>
@@ -4181,14 +3967,6 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-core</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
             </exclusions>
           </dependency>
           <dependency>
@@ -4283,14 +4061,6 @@
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
               </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-core-asl</artifactId>
@@ -4334,14 +4104,6 @@
             <artifactId>hbase-client</artifactId>
             <version>${hbase.version}</version>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
@@ -4393,14 +4155,6 @@
             <artifactId>hbase-server</artifactId>
             <version>${hbase.version}</version>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>tomcat</groupId>
                 <artifactId>jasper-compiler</artifactId>
@@ -4470,14 +4224,6 @@
             <version>${hbase.version}</version>
             <scope>test</scope>
             <exclusions>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
               <exclusion>
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
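
With the exclusions gone, every Netty artifact should now resolve to the
single version pinned by the imported netty-bom. One way to sanity-check
this at runtime is Netty's built-in version registry; a small sketch,
assuming netty-common is on the classpath (the class below is
hypothetical, not part of this commit):

    import java.util.Map;

    import io.netty.util.Version;

    // Hypothetical check: after the BOM import, every Netty module on the
    // classpath should report the same artifact version.
    public class NettyVersionCheck {
      public static void main(String[] args) {
        Map<String, Version> versions = Version.identify();
        versions.forEach((artifact, version) ->
            System.out.println(artifact + " -> " + version.artifactVersion()));
      }
    }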


[drill] 10/10: DRILL-8266: Number narrowing issues (#2608)

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 162bc851052deabd2134bfa22916bf17ebadd74c
Author: PJ Fanning <pj...@users.noreply.github.com>
AuthorDate: Wed Jul 20 06:15:49 2022 +0100

    DRILL-8266: Number narrowing issues (#2608)
---
 .../org/apache/drill/hbase/TestTableGenerator.java |  4 +--
 .../apache/drill/exec/udfs/NetworkFunctions.java   |  4 +--
 .../base/AbstractGroupScanWithMetadata.java        |  2 +-
 .../physical/impl/common/HashTableTemplate.java    |  2 +-
 .../apache/drill/exec/record/RecordBatchSizer.java |  4 +--
 .../drill/exec/record/VectorInitializer.java       |  2 +-
 .../columnreaders/NullableColumnReader.java        |  4 +--
 .../filereader/BufferedDirectBufInputStream.java   | 16 +++++------
 .../drill/exec/DrillSeparatePlanningTest.java      |  4 +--
 .../exec/physical/impl/writer/TestWriter.java      |  2 +-
 .../java/org/apache/drill/test/ProfileParser.java  | 33 +++++++++++-----------
 11 files changed, 38 insertions(+), 39 deletions(-)

diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
index 5e14e09706..b66d584d32 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java
@@ -462,7 +462,7 @@ public class TestTableGenerator {
 
     BufferedMutator table = conn.getBufferedMutator(tableName);
 
-    for (float i = (float)0.5; i <= 100.00; i += 0.75) {
+    for (float i = 0.5f; i <= 100.00; i += 0.75f) {
       byte[] bytes = new byte[5];
       PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
       OrderedBytes.encodeFloat32(br, i,Order.ASCENDING);
@@ -586,7 +586,7 @@ public class TestTableGenerator {
 
     BufferedMutator table = conn.getBufferedMutator(tableName);
 
-    for (float i = (float)0.5; i <= 100.00; i += 0.75) {
+    for (float i = 0.5f; i <= 100.00; i += 0.75f) {
       byte[] bytes = new byte[5];
       PositionedByteRange br = new SimplePositionedMutableByteRange(bytes, 0, 5);
       OrderedBytes.encodeFloat32(br, i, Order.DESCENDING);
diff --git a/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/NetworkFunctions.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/NetworkFunctions.java
index 0dbaf87a1d..fcbd39f5fa 100644
--- a/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/NetworkFunctions.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/NetworkFunctions.java
@@ -431,7 +431,7 @@ public class NetworkFunctions {
         int power = 3 - i;
         try {
           int ip = Integer.parseInt(ipAddressInArray[i]);
-          result += ip * Math.pow(256, power);
+          result += Math.round(ip * Math.pow(256, power));
         } catch (NumberFormatException e) {
           // should not happen since we validated the address
           // but if does, return null
@@ -509,4 +509,4 @@ public class NetworkFunctions {
       out.value = validator.isValidInet6Address(ipString) ? 1 : 0;
     }
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractGroupScanWithMetadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractGroupScanWithMetadata.java
index eb7d4da3ae..a11f834761 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractGroupScanWithMetadata.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractGroupScanWithMetadata.java
@@ -545,7 +545,7 @@ public abstract class AbstractGroupScanWithMetadata<P extends TableMetadataProvi
    */
   protected <T extends BaseMetadata> List<T> limitMetadata(Collection<T> metadataList, int maxRecords) {
     List<T> qualifiedMetadata = new ArrayList<>();
-    int currentRowCount = 0;
+    long currentRowCount = 0;
     for (T metadata : metadataList) {
       long rowCount = TableStatisticsKind.ROW_COUNT.getValue(metadata);
       if (rowCount == Statistic.NO_COLUMN_STATS) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
index c93de9ef47..26ffde908a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
@@ -862,7 +862,7 @@ public abstract class HashTableTemplate implements HashTable {
         bh.dump(startIdx);
       }
     }
-    resizingTime += System.currentTimeMillis() - t0;
+    resizingTime += Math.toIntExact(System.currentTimeMillis() - t0);
     numResizing++;
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java
index 4f5f02c578..70f874dfab 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java
@@ -466,7 +466,7 @@ public class RecordBatchSizer {
     private void allocateMap(AbstractMapVector map, int recordCount) {
       if (map instanceof AbstractRepeatedMapVector) {
         ((AbstractRepeatedMapVector) map).allocateOffsetsNew(recordCount);
-          recordCount *= getEntryCardinalityForAlloc();
+          recordCount *= Math.round(getEntryCardinalityForAlloc());
         }
 
       for (ValueVector vector : map) {
@@ -476,7 +476,7 @@ public class RecordBatchSizer {
 
     private void allocateRepeatedList(RepeatedListVector vector, int recordCount) {
       vector.allocateOffsetsNew(recordCount);
-      recordCount *= getEntryCardinalityForAlloc();
+      recordCount *= Math.round(getEntryCardinalityForAlloc());
       ColumnSize child = children.get(vector.getField().getName());
       if (vector.getDataVector() != null) {
         child.allocateVector(vector.getDataVector(), recordCount);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorInitializer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorInitializer.java
index 83c0142158..9dc15c1a7f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorInitializer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorInitializer.java
@@ -140,7 +140,7 @@ public class VectorInitializer {
       if (hint == null) {
         recordCount *= 10;
       } else {
-        recordCount *= hint.elementCount;
+        recordCount *= Math.round(hint.elementCount);
       }
     }
     prefix += map.getField().getName() + ".";
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableColumnReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableColumnReader.java
index 4e9121fd22..67531f2cf0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableColumnReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableColumnReader.java
@@ -173,7 +173,7 @@ abstract class NullableColumnReader<V extends ValueVector> extends ColumnReader<
         pageReader.readPosInBytes = readStartInBytes + readLength;
       }
 
-      pageReader.valuesRead += recordsReadInThisIteration;
+      pageReader.valuesRead += Math.toIntExact(recordsReadInThisIteration);
 
       totalValuesRead += runLength + nullRunLength;
 
@@ -287,7 +287,7 @@ abstract class NullableColumnReader<V extends ValueVector> extends ColumnReader<
         pageReader.readPosInBytes = readStartInBytes + readLength;
       }
 
-      pageReader.valuesRead += recordsReadInThisIteration;
+      pageReader.valuesRead += Math.toIntExact(recordsReadInThisIteration);
       totalValuesRead += numNonNullValues + numNullValues;
       currPageValuesProcessed += numNonNullValues + numNullValues;
       valueCount += numNonNullValues + numNullValues;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/filereader/BufferedDirectBufInputStream.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/filereader/BufferedDirectBufInputStream.java
index cfcb073580..c32ef55ebe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/filereader/BufferedDirectBufInputStream.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/filereader/BufferedDirectBufInputStream.java
@@ -104,8 +104,8 @@ public class BufferedDirectBufInputStream extends DirectBufInputStream implement
     super(in, allocator, id, startOffset, totalByteSize, enforceTotalByteSize, enableHints);
     Preconditions.checkArgument(bufSize >= 0);
     // We make the buffer size the smaller of the buffer Size parameter or the total Byte Size
-    // rounded to next highest pwoer of two
-    int bSize = bufSize < (int) totalByteSize ? bufSize : (int) totalByteSize;
+    // rounded to next highest power of two
+    int bSize = Math.min(bufSize, Math.toIntExact(totalByteSize));
     // round up to next power of 2
     bSize--;
     bSize |= bSize >>> 1;
@@ -216,7 +216,7 @@ public class BufferedDirectBufInputStream extends DirectBufInputStream implement
     }
     bytesAvailable = this.count - this.curPosInBuffer;
     //copy into output buffer
-    int copyBytes = bytesAvailable < len ? bytesAvailable : len;
+    int copyBytes = Math.min(bytesAvailable, len);
     getBuf().getBytes(curPosInBuffer, buf, off, copyBytes);
     buf.writerIndex(off + copyBytes);
     this.curPosInBuffer += copyBytes;
@@ -241,7 +241,7 @@ public class BufferedDirectBufInputStream extends DirectBufInputStream implement
     }
     bytesAvailable = this.count - this.curPosInBuffer;
     // return a slice as the  output
-    int bytesToRead = bytesAvailable < len ? bytesAvailable : len;
+    int bytesToRead = Math.min(bytesAvailable, len);
     DrillBuf newBuf = this.getBuf().slice(off, bytesToRead);
     newBuf.retain();
     return newBuf;
@@ -297,7 +297,7 @@ public class BufferedDirectBufInputStream extends DirectBufInputStream implement
 
 
   @Override public int read(byte[] b) throws IOException {
-    return b.length == 1 ? read() : read(b, (int) 0, b.length);
+    return b.length == 1 ? read() : read(b, 0, b.length);
   }
 
 
@@ -358,8 +358,8 @@ public class BufferedDirectBufInputStream extends DirectBufInputStream implement
         return 0;
       }
     }
-    bytesSkipped = bytesAvailable < n ? bytesAvailable : n;
-    this.curPosInBuffer += bytesSkipped;
+    bytesSkipped = Math.min(bytesAvailable, n);
+    this.curPosInBuffer += Math.toIntExact(bytesSkipped);
 
     return bytesSkipped;
   }
@@ -404,8 +404,6 @@ public class BufferedDirectBufInputStream extends DirectBufInputStream implement
           in = null;
           inp.close();
         }
-      } catch (IOException e) {
-        throw e;
       } finally {
         if ((buffer = this.internalBuffer) != null) {
           this.internalBuffer = null;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java
index 90e9e0054b..5ee2435a71 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java
@@ -176,7 +176,7 @@ public class DrillSeparatePlanningTest extends ClusterTest {
   }
 
   private int getResultsHelper(final QueryPlanFragments planFragments) throws Exception {
-    int totalRows = 0;
+    long totalRows = 0;
     for (PlanFragment fragment : planFragments.getFragmentsList()) {
       DrillbitEndpoint assignedNode = fragment.getAssignment();
       ClientFixture fragmentClient = cluster.client(assignedNode.getAddress(), assignedNode.getUserPort());
@@ -198,6 +198,6 @@ public class DrillSeparatePlanningTest extends ClusterTest {
       totalRows += summary.recordCount();
       fragmentClient.close();
     }
-    return totalRows;
+    return Math.toIntExact(totalRows);
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
index 2a6e1d56b5..5881ad540e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
@@ -149,7 +149,7 @@ public class TestWriter extends BaseTestQuery {
 
     RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
 
-    int recordsWritten = 0;
+    long recordsWritten = 0;
     for (QueryDataBatch batch : results) {
       batchLoader.load(batch.getHeader().getDef(), batch.getData());
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ProfileParser.java b/exec/java-exec/src/test/java/org/apache/drill/test/ProfileParser.java
index 88ba9a08c7..e7d4f037b0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ProfileParser.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ProfileParser.java
@@ -129,7 +129,7 @@ public class ProfileParser {
 
     public void parsePlans(String plan) {
       plans = new ArrayList<>();
-      String parts[] = plan.split("\n");
+      String[] parts = plan.split("\n");
       for (String part : parts) {
         plans.add(part);
         OperatorSummary opDef = new OperatorSummary(part);
@@ -206,10 +206,10 @@ public class ProfileParser {
   }
 
   private static List<FieldDef> parseCols(String cols) {
-    String parts[] = cols.split(", ");
+    String[] parts = cols.split(", ");
     List<FieldDef> fields = new ArrayList<>();
     for (String part : parts) {
-      String halves[] = part.split(" ");
+      String[] halves = part.split(" ");
       fields.add(new FieldDef(halves[1], halves[0]));
     }
     return fields;
@@ -241,7 +241,7 @@ public class ProfileParser {
   private void aggregateOpers() {
     for (FragInfo major : fragments.values()) {
       for (OperatorSummary opDef : major.ops) {
-        int sumPeak = 0;
+        long sumPeak = 0;
         opDef.execCount = opDef.opExecs.size();
         for (OperatorProfile op : opDef.opExecs) {
           Preconditions.checkState(major.id == op.majorFragId);
@@ -263,7 +263,7 @@ public class ProfileParser {
 
   public void buildTree() {
     int currentLevel = 0;
-    OperatorSummary opStack[] = new OperatorSummary[topoOrder.size()];
+    OperatorSummary[] opStack = new OperatorSummary[topoOrder.size()];
     for (OperatorSummary opDef : topoOrder) {
       currentLevel = opDef.globalLevel;
       opStack[currentLevel] = opDef;
@@ -307,10 +307,10 @@ public class ProfileParser {
     Matcher m = p.matcher(plan);
     if (! m.find()) { return null; }
     String frag = m.group(1);
-    String parts[] = frag.split(", ");
+    String[] parts = frag.split(", ");
     List<FieldDef> fields = new ArrayList<>();
     for (String part : parts) {
-      String halves[] = part.split(" ");
+      String[] halves = part.split(" ");
       fields.add(new FieldDef(halves[1], halves[0]));
     }
     return fields;
@@ -463,10 +463,11 @@ public class ProfileParser {
     }
 
     public long getMetric(int id) {
-      JsonValue value = metrics.get(id);
+      JsonNumber value = metrics.get(id);
       if (value == null) {
-        return 0; }
-      return ((JsonNumber) value).longValue();
+        return 0;
+      }
+      return value.longValue();
     }
 
     @Override
@@ -673,9 +674,9 @@ public class ProfileParser {
       final StringBuilder nodeBuilder = new StringBuilder();
       nodeBuilder.append(String.format("%02d-%02d ", node.majorId, node.stepId));
       String indent = indentString(indentLevel, ". ");
-      nodeBuilder.append(indent + node.name);
+      nodeBuilder.append(indent).append(node.name);
       if (node.opName != null) {
-        nodeBuilder.append(" (" + node.opName + ")");
+        nodeBuilder.append(" (").append(node.opName).append(")");
       }
       logger.info(nodeBuilder.toString());
 
@@ -835,7 +836,7 @@ public class ProfileParser {
       }
       sb.append(node.name);
       if (node.opName != null) {
-        sb.append(" (" + node.opName + ")");
+        sb.append(" (").append(node.opName).append(")");
       }
       logger.info(sb.toString());
       printTimes(node, "  ");
@@ -854,9 +855,9 @@ public class ProfileParser {
       final StringBuilder sb = new StringBuilder();
       sb.append(String.format("%02d-%02d ", node.majorId, node.stepId));
       String indent = indentString(indentLevel, ". ");
-      sb.append(indent + node.name);
+      sb.append(indent).append(node.name);
       if (node.opName != null) {
-        sb.append(" (" + node.opName + ")");
+        sb.append(" (").append(node.opName).append(")");
       }
       logger.info(sb.toString());
       indent = indentString(15);
@@ -911,7 +912,7 @@ public class ProfileParser {
   public static long percent(long value, long total) {
     if (total == 0) {
       return 0; }
-    return Math.round(value * 100 / total);
+    return Math.round(value * 100 / (double)total);
   }
 
   public List<OperatorSummary> getOpDefn(String target) {
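
The fixes above follow a consistent pattern: widen accumulators from int
to long, perform division in floating point before rounding, and make any
remaining narrowing explicit with Math.toIntExact so overflow fails fast
instead of wrapping silently. A short illustrative sketch of the two
pitfalls; not part of the commit:

    // Illustrative only: the two narrowing pitfalls this commit addresses.
    public class NarrowingExamples {
      public static void main(String[] args) {
        // Pitfall 1: with integer operands the division truncates before
        // Math.round ever sees a fractional value.
        long value = 1, total = 8;
        System.out.println(Math.round(value * 100 / total));          // 12 (12.5 truncated)
        System.out.println(Math.round(value * 100 / (double) total)); // 13 (12.5 rounded up)

        // Pitfall 2: a plain (int) cast wraps silently; Math.toIntExact throws.
        long big = Integer.MAX_VALUE + 1L;
        System.out.println((int) big); // -2147483648, silently wrapped
        try {
          Math.toIntExact(big);
        } catch (ArithmeticException e) {
          System.out.println("toIntExact: " + e.getMessage()); // "integer overflow"
        }
      }
    }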


[drill] 06/10: DRILL-8182: File scan nodes not differentiated by format config (#2583)

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 94917849f93465e19726a29e0fbbc76a3483fcee
Author: James Turton <91...@users.noreply.github.com>
AuthorDate: Fri Jul 8 08:03:26 2022 +0200

    DRILL-8182: File scan nodes not differentiated by format config (#2583)
---
 .../drill/exec/store/excel/TestExcelFormat.java    |  30 +++++++++++++
 .../resources/excel/test_cross_sheet_join.xlsx     | Bin 0 -> 6426 bytes
 .../cassandra/schema/CassandraDynamicTable.java    |   3 +-
 .../drill/exec/store/druid/DruidScanSpec.java      |   8 +++-
 .../schema/ElasticsearchDynamicTable.java          |   3 +-
 .../exec/store/hbase/AbstractHBaseDrillTable.java  |   3 +-
 .../drill/exec/store/hbase/HBaseScanSpec.java      |  20 ++++++---
 .../drill/hbase/TestHBaseFilterPushDown.java       |  44 +++++++++---------
 .../drill/exec/store/hive/HiveReadEntry.java       |  17 ++++++-
 .../apache/drill/exec/store/http/HttpScanSpec.java |   8 +++-
 .../drill/exec/store/kafka/KafkaScanSpec.java      |  13 +++++-
 .../apache/drill/exec/store/kudu/KuduScanSpec.java |  16 ++++++-
 .../drill/exec/store/mongo/MongoScanSpec.java      |   8 +++-
 .../exec/store/openTSDB/OpenTSDBScanSpec.java      |  15 +++++--
 .../drill/exec/store/splunk/SplunkScanSpec.java    |   8 +++-
 .../planner/FileSystemPartitionDescriptor.java     |  10 +++--
 .../drill/exec/planner/logical/DrillTable.java     |  17 ++++---
 .../exec/planner/logical/DrillTableSelection.java  |  36 +++++++++++++++
 .../exec/planner/logical/DynamicDrillTable.java    |   6 +--
 ...TableScan.java => SelectionBasedTableScan.java} |  16 +++----
 .../planner/logical/partition/PruneScanRule.java   |   5 +++
 .../apache/drill/exec/store/dfs/FileSelection.java |   8 +++-
 .../drill/exec/store/dfs/FormatSelection.java      |  13 +++++-
 .../drill/exec/store/dfs/easy/EasyGroupScan.java   |   8 ++++
 .../exec/store/ischema/InfoSchemaTableType.java    |   8 +++-
 .../drill/exec/store/mock/MockStorageEngine.java   |  21 ++++-----
 .../apache/drill/exec/store/mock/MockTableDef.java |  30 +++++++++++--
 .../exec/store/plan/rel/PluginDrillTable.java      |   3 +-
 .../drill/exec/store/sys/StaticDrillTable.java     |   3 +-
 .../apache/drill/exec/store/sys/SystemTable.java   |   8 +++-
 .../exec/store/enumerable/plan/EnumMockTable.java  |   3 +-
 .../conv/conversionTestWithLogicalPlan.json        |  50 +++++++++++----------
 .../src/test/resources/scan_screen_logical.json    |  18 ++++----
 pom.xml                                            |   2 +-
 34 files changed, 342 insertions(+), 119 deletions(-)

diff --git a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
index 9faf64e95e..f4fc80a80f 100644
--- a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
+++ b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
@@ -780,4 +780,34 @@ public class TestExcelFormat extends ClusterTest {
 
     new RowSetComparison(expected).verifyAndClearAll(results);
   }
+
+  // DRILL-8182
+  @Test
+  public void testTableFuncsThatDifferOnlyByFormatConfig() throws Exception {
+    String sql = "WITH prod AS (" +
+      " SELECT id, name FROM table(cp.`excel/test_cross_sheet_join.xlsx` (type=> 'excel', sheetName => 'products'))" +
+      "), cust AS (" +
+      " SELECT id, name FROM table(cp.`excel/test_cross_sheet_join.xlsx` (type=> 'excel', sheetName => 'customers'))" +
+      ")" +
+      "SELECT prod.*, cust.* from prod JOIN cust ON prod.id = cust.id";
+
+    RowSet results = client.queryBuilder().sql(sql).rowSet();
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+      .addNullable("id", MinorType.FLOAT8)
+      .addNullable("name", MinorType.VARCHAR)
+      .addNullable("id0", MinorType.FLOAT8)
+      .addNullable("name0", MinorType.VARCHAR)
+      .buildSchema();
+
+    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+      .addRow(1.0, "Doughnut", 1.0, "Alice")
+      .addRow(2.0, "Coffee", 2.0, "Bob")
+      .addRow(3.0, "Coke", 3.0, "Carol")
+      .addRow(4.0, "Cheesecake", 4.0, "Dave")
+      .addRow(5.0, "Popsicle", 5.0, "Eve")
+      .build();
+
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
 }
diff --git a/contrib/format-excel/src/test/resources/excel/test_cross_sheet_join.xlsx b/contrib/format-excel/src/test/resources/excel/test_cross_sheet_join.xlsx
new file mode 100644
index 0000000000..b057361229
Binary files /dev/null and b/contrib/format-excel/src/test/resources/excel/test_cross_sheet_join.xlsx differ
diff --git a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java
index 8b1d58d899..7d5c4022ba 100644
--- a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java
+++ b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java
@@ -34,6 +34,7 @@ import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.schema.TranslatableTable;
 import org.apache.calcite.schema.Wrapper;
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.store.StoragePlugin;
 
 import java.lang.reflect.Type;
@@ -44,7 +45,7 @@ public class CassandraDynamicTable extends DrillTable implements TranslatableTab
 
   private final CassandraTable table;
 
-  public CassandraDynamicTable(StoragePlugin plugin, String storageEngineName, Object selection, CassandraTable table) {
+  public CassandraDynamicTable(StoragePlugin plugin, String storageEngineName, DrillTableSelection selection, CassandraTable table) {
     super(storageEngineName, plugin, selection);
     this.table = table;
   }
diff --git a/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java b/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java
index dcd74315ef..f4e6114664 100755
--- a/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java
+++ b/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java
@@ -21,9 +21,10 @@ package org.apache.drill.exec.store.druid;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.store.druid.common.DruidFilter;
 
-public class DruidScanSpec {
+public class DruidScanSpec implements DrillTableSelection {
 
   private final String dataSourceName;
   private final long dataSourceSize;
@@ -84,4 +85,9 @@ public class DruidScanSpec {
       .field("filter", filter)
       .toString();
   }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
diff --git a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java
index a1919f5929..1e18fd3bc1 100644
--- a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java
+++ b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java
@@ -31,6 +31,7 @@ import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.schema.Table;
 import org.apache.calcite.schema.TranslatableTable;
 import org.apache.calcite.schema.Wrapper;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.planner.logical.DynamicDrillTable;
 import org.apache.drill.exec.store.StoragePlugin;
 
@@ -40,7 +41,7 @@ public class ElasticsearchDynamicTable extends DynamicDrillTable implements Tran
 
   private final ElasticsearchTable table;
 
-  public ElasticsearchDynamicTable(StoragePlugin plugin, String storageEngineName, Object selection, Table table) {
+  public ElasticsearchDynamicTable(StoragePlugin plugin, String storageEngineName, DrillTableSelection selection, Table table) {
     super(plugin, storageEngineName, selection);
     this.table = (ElasticsearchTable) table;
   }
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java
index 93d87392e9..21df6b591c 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java
@@ -22,6 +22,7 @@ import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.store.StoragePlugin;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
@@ -41,7 +42,7 @@ public abstract class AbstractHBaseDrillTable extends DrillTable {
 
   protected HTableDescriptor tableDesc;
 
-  public AbstractHBaseDrillTable(String storageEngineName, StoragePlugin plugin, Object selection) {
+  public AbstractHBaseDrillTable(String storageEngineName, StoragePlugin plugin, DrillTableSelection selection) {
     super(storageEngineName, plugin, selection);
   }
 
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java
index 793d924f43..cfcf7402e4 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java
@@ -18,6 +18,8 @@
 package org.apache.drill.exec.store.hbase;
 
 
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -26,7 +28,7 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
-public class HBaseScanSpec {
+public class HBaseScanSpec implements DrillTableSelection {
 
   protected String tableName;
   protected byte[] startRow;
@@ -87,10 +89,16 @@ public class HBaseScanSpec {
 
   @Override
   public String toString() {
-    return "HBaseScanSpec [tableName=" + tableName
-        + ", startRow=" + (startRow == null ? null : Bytes.toStringBinary(startRow))
-        + ", stopRow=" + (stopRow == null ? null : Bytes.toStringBinary(stopRow))
-        + ", filter=" + (filter == null ? null : filter.toString())
-        + "]";
+    return new PlanStringBuilder(this)
+      .field("tableName", tableName)
+      .field("startRow", startRow == null ? null : Bytes.toStringBinary(startRow))
+      .field("stopRow", stopRow == null ? null : Bytes.toStringBinary(stopRow))
+      .field("filter", filter == null ? null : filter.toString())
+      .toString();
+  }
+
+  @Override
+  public String digest() {
+    return toString();
   }
 }
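
Judging from the updated test patterns that follow, the switch to
PlanStringBuilder also changes the rendered form of the spec: string values
are quoted and null fields are dropped. Roughly (values illustrative):

    // old toString(): HBaseScanSpec [tableName=t1, startRow=b4, stopRow=b4\x00, filter=null]
    // new toString(): HBaseScanSpec [tableName="t1", startRow="b4", stopRow="b4\x00"]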
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
index cccaeb1c0c..edba290fc3 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
@@ -38,7 +38,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 1);
 
-    final String[] expectedPlan = {".*startRow=b4, stopRow=b4\\\\x00, filter=null.*"};
+    final String[] expectedPlan = {".*startRow=\"b4\", stopRow=\"b4\\\\x00\".*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -56,7 +56,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 7);
 
-    final String[] expectedPlan = {".*startRow=, stopRow=, filter=RowFilter \\(NOT_EQUAL, b4\\).*"};
+    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"RowFilter \\(NOT_EQUAL, b4\\)\".*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -74,7 +74,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 1);
 
-    final String[] expectedPlan = {".*startRow=b4, stopRow=b4\\\\x00, filter=null.*"};
+    final String[] expectedPlan = {".*startRow=\"b4\", stopRow=\"b4\\\\x00\".*"};
     final String[] excludedPlan ={".*startRow=null, stopRow=null.*"};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -489,7 +489,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 21);
 
-    final String[] expectedPlan = {".*filter=FilterList OR.*EQUAL.*EQUAL.*"};
+    final String[] expectedPlan = {".*filter=\"FilterList OR.*EQUAL.*EQUAL.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -507,7 +507,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 2);
 
-    final String[] expectedPlan = {".*startRow=\\%_AS_PREFIX_, stopRow=\\%_AS_PREFIX`, filter=RowFilter.*EQUAL.*"};
+    final String[] expectedPlan = {".*startRow=\"\\%_AS_PREFIX_\", stopRow=\"\\%_AS_PREFIX`\", filter=\"RowFilter.*EQUAL.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -525,7 +525,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 22);
 
-    final String[] expectedPlan = {".*startRow=07, stopRow=09, filter=FilterList AND.*RowFilter \\(GREATER_OR_EQUAL, 07\\), RowFilter \\(LESS, 09\\), SingleColumnValueFilter \\(f, c, EQUAL.*"};
+    final String[] expectedPlan = {".*startRow=\"07\", stopRow=\"09\", filter=\"FilterList AND.*RowFilter \\(GREATER_OR_EQUAL, 07\\), RowFilter \\(LESS, 09\\), SingleColumnValueFilter \\(f, c, EQUAL.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -543,7 +543,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 4);
 
-    final String[] expectedPlan = {".*startRow=b4\\\\x00.*stopRow=,.*"};
+    final String[] expectedPlan = {".*startRow=\"b4\\\\x00\", stopRow=\"\".*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -561,7 +561,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 2);
 
-    final String[] expectedPlan = {".*startRow=b4\\\\x00.*stopRow=, filter=null.*"};
+    final String[] expectedPlan = {".*startRow=\"b4\\\\x00\".*stopRow=.*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -579,7 +579,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 3);
 
-    final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*"};
+    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -597,7 +597,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 3);
 
-    final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*"};
+    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -615,7 +615,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 5);
 
-    final String[] expectedPlan = {".*startRow=, stopRow=, filter=FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*"};
+    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -631,7 +631,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
         + "WHERE\n"
         + "  (row_key >= 'b5' OR row_key <= 'a2') AND (t.f.c1 >= '1' OR t.f.c1 is null)";
 
-    final String[] expectedPlan = {".*startRow=, stopRow=, filter=FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*"};
+    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -649,7 +649,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 4);
 
-    final String[] expectedPlan = {".*startRow=b4\\\\x00, stopRow=,.*"};
+    final String[] expectedPlan = {".*startRow=\"b4\\\\x00\", stopRow=\"\".*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -667,7 +667,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 2);
 
-    final String[] expectedPlan = {".*startRow=b4\\\\x00, stopRow=,.*"};
+    final String[] expectedPlan = {".*startRow=\"b4\\\\x00\", stopRow=\"\".*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -698,7 +698,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 4);
 
-    final String[] expectedPlan = {".*startRow=, stopRow=b4\\\\x00, filter=null.*"};
+    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"b4\\\\x00\".*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -716,7 +716,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 4);
 
-    final String[] expectedPlan = {".*startRow=, stopRow=b4\\\\x00, filter=null.*"};
+    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"b4\\\\x00\".*"};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -734,7 +734,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 2);
 
-    final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*"};
+    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -753,7 +753,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 2);
 
-    final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*"};
+    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -772,7 +772,7 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     runHBaseSQLVerifyCount(sql, 3);
 
-    final String[] expectedPlan = {".*startRow=a2, stopRow=b6\\\\x00, filter=FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, a2\\), FilterList AND \\(2/2\\): \\[RowFilter \\(GREATER_OR_EQUAL, b5\\), RowFilter \\(LESS_OR_EQUAL, b6.*"};
+    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b6\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, a2\\), FilterList AND \\(2/2\\): \\[RowFilter \\(GREATER_OR_EQUAL, b5\\), RowFilter \\(LESS_OR_EQUAL, b6.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -808,9 +808,9 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
 
     String query = "select d from dfs.tmp.pd_view where d > date '2015-06-13' and d < DATE '2015-06-18'";
     String[] expectedPlan = {
-        "startRow=\\\\x00\\\\x00\\\\x01M\\\\xEF\\]\\\\xA0\\\\x00, " +
-        "stopRow=\\\\x00\\\\x00\\\\x01N\\\\x03\\\\xF7\\\\x10\\\\x00, " +
-        "filter=null"};
+      "startRow=\"\\\\x00\\\\x00\\\\x01M\\\\xEF\\]\\\\xA0\\\\x00\", " +
+      "stopRow=\"\\\\x00\\\\x00\\\\x01N\\\\x03\\\\xF7\\\\x10\\\\x00\""
+    };
     String[] excludedPlan ={"Filter\\("};
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
 
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
index d8bf75072b..7710a2b42a 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
@@ -21,6 +21,8 @@ import java.util.List;
 
 import org.apache.calcite.schema.Schema.TableType;
 
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.store.hive.HiveTableWrapper.HivePartitionWrapper;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
@@ -28,7 +30,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 
-public class HiveReadEntry {
+public class HiveReadEntry implements DrillTableSelection {
 
   @JsonProperty("table")
   public HiveTableWrapper table;
@@ -93,5 +95,18 @@ public class HiveReadEntry {
 
     return partitionPath;
   }
+
+  @Override
+  public String toString() {
+    return new PlanStringBuilder(this)
+      .field("tableName", table)
+      .field("partitions", partitions)
+      .toString();
+  }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
 
diff --git a/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java b/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java
index e43490c8f5..22c056e7f3 100644
--- a/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java
+++ b/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java
@@ -24,10 +24,11 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import org.apache.drill.common.PlanStringBuilder;
 import org.apache.drill.exec.oauth.PersistentTokenTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 
 @JsonTypeName("http-scan-spec")
-public class HttpScanSpec {
+public class HttpScanSpec implements DrillTableSelection {
 
   private final String pluginName;
   private final String connectionName;
@@ -100,4 +101,9 @@ public class HttpScanSpec {
       .field("config", config)
       .toString();
   }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java
index d0590991cf..6bd88f84b9 100644
--- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java
+++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java
@@ -19,8 +19,10 @@ package org.apache.drill.exec.store.kafka;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 
-public class KafkaScanSpec {
+public class KafkaScanSpec implements DrillTableSelection {
   private final String topicName;
 
   @JsonCreator
@@ -34,6 +36,13 @@ public class KafkaScanSpec {
 
   @Override
   public String toString() {
-    return "KafkaScanSpec [topicName=" + topicName + "]";
+    return new PlanStringBuilder(this)
+      .field("topicName", topicName)
+      .toString();
+  }
+
+  @Override
+  public String digest() {
+    return toString();
   }
 }
diff --git a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java
index 371cf2bb2a..78abdbefbd 100644
--- a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java
+++ b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java
@@ -20,8 +20,10 @@ package org.apache.drill.exec.store.kudu;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 
-public class KuduScanSpec {
+public class KuduScanSpec implements DrillTableSelection {
 
   private final String tableName;
 
@@ -33,4 +35,16 @@ public class KuduScanSpec {
   public String getTableName() {
     return tableName;
   }
+
+  @Override
+  public String toString() {
+    return new PlanStringBuilder(this)
+      .field("tableName", tableName)
+      .toString();
+  }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
diff --git a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java
index 2c97785457..41426332c5 100644
--- a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java
+++ b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java
@@ -20,11 +20,12 @@ package org.apache.drill.exec.store.mongo;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 
 import java.util.ArrayList;
 import java.util.List;
 
-public class MongoScanSpec {
+public class MongoScanSpec implements DrillTableSelection {
   private final String dbName;
   private final String collectionName;
 
@@ -71,4 +72,9 @@ public class MongoScanSpec {
       .field("operations", operations)
       .toString();
   }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
index f93758de9f..50931d1eb2 100644
--- a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
@@ -19,8 +19,10 @@ package org.apache.drill.exec.store.openTSDB;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 
-public class OpenTSDBScanSpec {
+public class OpenTSDBScanSpec implements DrillTableSelection {
 
   private final String tableName;
 
@@ -35,8 +37,13 @@ public class OpenTSDBScanSpec {
 
   @Override
   public String toString() {
-    return "OpenTSDBScanSpec{" +
-            "tableName='" + tableName + '\'' +
-            '}';
+    return new PlanStringBuilder(this)
+      .field("tableName", tableName)
+      .toString();
+  }
+
+  @Override
+  public String digest() {
+    return toString();
   }
 }
diff --git a/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java b/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java
index 513db63707..2d736bbd4b 100644
--- a/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java
+++ b/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java
@@ -22,9 +22,10 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 
 @JsonTypeName("splunk-scan-spec")
-public class SplunkScanSpec {
+public class SplunkScanSpec implements DrillTableSelection {
   private final String pluginName;
   private final String indexName;
   private final SplunkPluginConfig config;
@@ -55,4 +56,9 @@ public class SplunkScanSpec {
       .field("indexName", indexName)
       .toString();
   }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java
index 5fb84708a7..23e7ef827a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java
@@ -41,7 +41,7 @@ import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.physical.base.FileGroupScan;
-import org.apache.drill.exec.planner.logical.DirPrunedEnumerableTableScan;
+import org.apache.drill.exec.planner.logical.SelectionBasedTableScan;
 import org.apache.drill.exec.planner.logical.DrillRel;
 import org.apache.drill.exec.planner.logical.DrillScanRel;
 import org.apache.drill.exec.planner.logical.DrillTable;
@@ -252,8 +252,8 @@ public class FileSystemPartitionDescriptor extends AbstractPartitionDescriptor {
       RelOptTableImpl newOptTableImpl = RelOptTableImpl.create(relOptTable.getRelOptSchema(), relOptTable.getRowType(),
           newTable, GuavaUtils.convertToUnshadedImmutableList(relOptTable.getQualifiedName()));
 
-      // return an EnumerableTableScan with fileSelection being part of digest of TableScan node.
-      return DirPrunedEnumerableTableScan.create(scanRel.getCluster(), newOptTableImpl, newFileSelection.toString());
+      // return a SelectionBasedTableScan with fileSelection being part of the digest of the TableScan node.
+      return SelectionBasedTableScan.create(scanRel.getCluster(), newOptTableImpl, newFileSelection.toString());
     } else {
       throw new UnsupportedOperationException("Only DrillScanRel and EnumerableTableScan is allowed!");
     }
@@ -271,4 +271,8 @@ public class FileSystemPartitionDescriptor extends AbstractPartitionDescriptor {
     return selection instanceof FormatSelection
         && ((FormatSelection)selection).getSelection().getCacheFileRoot() != null;
   }
+
+  private static boolean supportsScan(TableScan scanRel) {
+    return scanRel instanceof SelectionBasedTableScan;
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
index 7a9dc0c081..4857d802d0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
@@ -20,7 +20,6 @@ package org.apache.drill.exec.planner.logical;
 import java.io.IOException;
 import java.util.Objects;
 
-import org.apache.calcite.adapter.enumerable.EnumerableTableScan;
 import org.apache.calcite.config.CalciteConnectionConfig;
 import org.apache.calcite.plan.RelOptTable;
 import org.apache.calcite.rel.RelNode;
@@ -47,7 +46,7 @@ public abstract class DrillTable implements Table {
   private final String storageEngineName;
   private final StoragePluginConfig storageEngineConfig;
   private final TableType tableType;
-  private final Object selection;
+  private final DrillTableSelection selection;
   private final StoragePlugin plugin;
   private final String userName;
   private GroupScan scan;
@@ -61,7 +60,7 @@ public abstract class DrillTable implements Table {
    * @param userName Whom to impersonate while reading the contents of the table.
    * @param selection Table contents (type and contents depend on type of StoragePlugin).
    */
-  public DrillTable(String storageEngineName, StoragePlugin plugin, String userName, Object selection) {
+  public DrillTable(String storageEngineName, StoragePlugin plugin, String userName, DrillTableSelection selection) {
     this(storageEngineName, plugin, TableType.TABLE, userName, selection);
   }
 
@@ -73,12 +72,12 @@ public abstract class DrillTable implements Table {
    * @param userName Whom to impersonate while reading the contents of the table.
    * @param selection Table contents (type and contents depend on type of StoragePlugin).
    */
-  public DrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, String userName, Object selection) {
+  public DrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, String userName, DrillTableSelection selection) {
     this(storageEngineName, plugin, tableType, userName, selection, null);
   }
 
   public DrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType,
-                    String userName, Object selection, MetadataProviderManager metadataProviderManager) {
+                    String userName, DrillTableSelection selection, MetadataProviderManager metadataProviderManager) {
     this.selection = selection;
     this.plugin = plugin;
 
@@ -95,7 +94,7 @@ public abstract class DrillTable implements Table {
    * process. Once we add impersonation to non-FileSystem storage plugins such as Hive, HBase etc,
    * we can remove this constructor.
    */
-  public DrillTable(String storageEngineName, StoragePlugin plugin, Object selection) {
+  public DrillTable(String storageEngineName, StoragePlugin plugin, DrillTableSelection selection) {
     this(storageEngineName, plugin, ImpersonationUtil.getProcessUserName(), selection);
   }
 
@@ -166,9 +165,9 @@ public abstract class DrillTable implements Table {
   }
 
   public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable table) {
-    // returns non-drill table scan to allow directory-based partition pruning
-    // before table group scan is created
-    return EnumerableTableScan.create(context.getCluster(), table);
+    // Returns non-drill table scan to allow directory-based partition pruning
+    // before table group scan is created.
+    return SelectionBasedTableScan.create(context.getCluster(), table, selection.digest());
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTableSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTableSelection.java
new file mode 100644
index 0000000000..b514a4995e
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTableSelection.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.logical;
+
+public interface DrillTableSelection {
+
+  /**
+   * The digest of the selection represented by the implementation. The
+   * selections that accompany Tables can modify the contained dataset, e.g.
+   * a file selection can restrict to a subset of the available data and a
+   * format selection can include options that affect the behaviour of the
+   * underlying reader. Two scans will end up being considered identical during
+   * logical planning if their digests are the same so selection
+   * implementations should override this method so that exactly those scans
+   * that really are identical (in terms of the data they produce) have matching
+   * digests.
+   *
+   * @return this selection's digest, normally a string built from its properties.
+   */
+  public String digest();
+}
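
The scan specs elsewhere in this commit satisfy the contract by delegating
digest() to toString(). A minimal hypothetical implementation of the
interface (class and field names are illustrative, not from the commit):

    public class ExampleScanSpec implements DrillTableSelection {
      private final String tableName;
      private final String readerOption; // any property that changes the data produced

      public ExampleScanSpec(String tableName, String readerOption) {
        this.tableName = tableName;
        this.readerOption = readerOption;
      }

      @Override
      public String toString() {
        return "ExampleScanSpec [tableName=" + tableName
            + ", readerOption=" + readerOption + "]";
      }

      // Scans with equal digests can be merged during logical planning, so
      // every data-affecting property must appear in the digest.
      @Override
      public String digest() {
        return toString();
      }
    }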
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java
index c406d9a644..8164287a02 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java
@@ -30,12 +30,12 @@ public class DynamicDrillTable extends DrillTable {
 
   private final RelDataTypeHolder holder;
 
-  public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, String userName, Object selection) {
+  public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, String userName, DrillTableSelection selection) {
     this(plugin, storageEngineName, userName, selection, null);
   }
 
   public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, String userName,
-    Object selection, MetadataProviderManager metadataProviderManager) {
+    DrillTableSelection selection, MetadataProviderManager metadataProviderManager) {
     super(storageEngineName, plugin, Schema.TableType.TABLE, userName, selection, metadataProviderManager);
     this.holder = new RelDataTypeHolder();
   }
@@ -46,7 +46,7 @@ public class DynamicDrillTable extends DrillTable {
    * non-FileSystem storage plugins such as Hive, HBase etc, we can remove this
    * constructor.
    */
-  public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, Object selection) {
+  public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, DrillTableSelection selection) {
     this(plugin, storageEngineName, ImpersonationUtil.getProcessUserName(), selection, null);
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DirPrunedEnumerableTableScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
similarity index 82%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DirPrunedEnumerableTableScan.java
rename to exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
index 3287a80e53..971d7d1aea 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DirPrunedEnumerableTableScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
@@ -35,11 +35,11 @@ import java.util.List;
  * When directory-based partition pruning applied, file selection could be different for the same
  * table.
  */
-public class DirPrunedEnumerableTableScan extends EnumerableTableScan {
+public class SelectionBasedTableScan extends EnumerableTableScan {
   private final String digestFromSelection;
 
-  public DirPrunedEnumerableTableScan(RelOptCluster cluster, RelTraitSet traitSet,
-      RelOptTable table, Class elementType, String digestFromSelection) {
+  public SelectionBasedTableScan(RelOptCluster cluster, RelTraitSet traitSet,
+                                 RelOptTable table, Class elementType, String digestFromSelection) {
     super(cluster, traitSet, table, elementType);
     this.digestFromSelection = digestFromSelection;
   }
@@ -48,12 +48,11 @@ public class DirPrunedEnumerableTableScan extends EnumerableTableScan {
   public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
     final Table tbl = this.table.unwrap(Table.class);
     Class elementType = EnumerableTableScan.deduceElementType(tbl);
-
-    return new DirPrunedEnumerableTableScan(getCluster(), traitSet, table, elementType, digestFromSelection);
+    return new SelectionBasedTableScan(getCluster(), traitSet, table, elementType, digestFromSelection);
   }
 
-  /** Creates an DirPrunedEnumerableTableScan. */
-  public static EnumerableTableScan create(RelOptCluster cluster,
+  /** Creates a SelectionBasedTableScan. */
+  public static SelectionBasedTableScan create(RelOptCluster cluster,
       RelOptTable relOptTable, String digestFromSelection) {
     final Table table = relOptTable.unwrap(Table.class);
     Class elementType = EnumerableTableScan.deduceElementType(table);
@@ -66,7 +65,8 @@ public class DirPrunedEnumerableTableScan extends EnumerableTableScan {
                   }
                   return ImmutableList.of();
                 });
-    return new DirPrunedEnumerableTableScan(cluster, traitSet, relOptTable, elementType, digestFromSelection);
+
+    return new SelectionBasedTableScan(cluster, traitSet, relOptTable, elementType, digestFromSelection);
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
index b72ecee33e..9d8c753056 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
@@ -27,6 +27,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
 import org.apache.drill.exec.planner.common.DrillRelOptUtil;
+import org.apache.drill.exec.planner.logical.SelectionBasedTableScan;
 import org.apache.drill.exec.util.DrillFileSystemUtil;
 import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
 import org.apache.calcite.adapter.enumerable.EnumerableTableScan;
@@ -778,4 +779,8 @@ public abstract class PruneScanRule extends StoragePluginOptimizerRule {
       return false;
     }
   }
+
+  private static boolean supportsScan(TableScan scan) {
+    return scan instanceof SelectionBasedTableScan;
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
index ebe1a43cea..6563c7803a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.store.dfs;
 
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.util.DrillStringUtils;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.util.DrillFileSystemUtil;
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
@@ -40,7 +41,7 @@ import java.util.stream.Collectors;
 /**
  * Jackson serializable description of a file selection.
  */
-public class FileSelection {
+public class FileSelection implements DrillTableSelection {
 
   private static final Logger logger = LoggerFactory.getLogger(FileSelection.class);
   private static final String WILD_CARD = "*";
@@ -437,6 +438,11 @@ public class FileSelection {
     this.emptyDirectory = true;
   }
 
+  @Override
+  public String digest() {
+    return toString();
+  }
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java
index d2a55455a1..4210b82f8b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java
@@ -21,11 +21,12 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.hadoop.fs.Path;
 
 import java.util.List;
 
-public class FormatSelection {
+public class FormatSelection implements DrillTableSelection {
 
   private FormatPluginConfig format;
   private FileSelection selection;
@@ -62,4 +63,14 @@ public class FormatSelection {
   public boolean supportsDirPruning() {
     return selection.supportsDirPruning();
   }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
+
+  @Override
+  public String toString() {
+    return String.format("fileSelection=%s,formatConfig=%s", selection, format);
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
index 3b3ea83cef..d0b1636199 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
@@ -319,6 +319,13 @@ public class EasyGroupScan extends AbstractGroupScanWithMetadata<TableMetadataPr
 
   @Override
   public String toString() {
+    // Note that the output of this method is incorporated in the digest of
+    // the corresponding scan node in the query plan. This means that the
+    // fields included here constitute what the planner will use to decide
+    // whether two scans are identical or not. E.g. the format config must be
+    // present here because format config can be overridden using table functions.
+    // Two scans that differ by format config alone may produce different data
+    // and therefore should not be considered identical.
     return new PlanStringBuilder(this)
       .field("selectionRoot", selectionRoot)
       .field("numFiles", getFiles().size())
@@ -327,6 +334,7 @@ public class EasyGroupScan extends AbstractGroupScanWithMetadata<TableMetadataPr
       .field("schema", getSchema())
       .field("usedMetastore", usedMetastore())
       .field("limit", limit)
+      .field("formatConfig", getFormatConfig())
       .toString();
   }
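
With formatConfig now part of the digest, the two Excel table() scans in the
test added earlier in this commit no longer collapse into a single node;
conceptually (renderings illustrative):

    // digest A: ... formatConfig=... sheetName=products ...
    // digest B: ... formatConfig=... sheetName=customers ...
    // The digests differ, so the planner keeps two distinct scans for the join.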
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java
index 3e38fb2389..f08e380e5b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java
@@ -21,6 +21,7 @@ import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.server.options.OptionManager;
 import org.apache.drill.exec.store.ischema.InfoSchemaTable.Catalogs;
 import org.apache.drill.exec.store.ischema.InfoSchemaTable.Columns;
@@ -40,7 +41,7 @@ import static org.slf4j.LoggerFactory.getLogger;
 /**
  * The set of tables / views in INFORMATION_SCHEMA.
  */
-public enum InfoSchemaTableType {
+public enum InfoSchemaTableType implements DrillTableSelection {
 
   CATALOGS(new Catalogs()),
   SCHEMATA(new Schemata()),
@@ -91,4 +92,9 @@ public enum InfoSchemaTableType {
   public RelDataType getRowType(RelDataTypeFactory typeFactory) {
     return tableDef.getRowType(typeFactory);
   }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
index fb00f836ea..983e150be6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
@@ -19,7 +19,6 @@ package org.apache.drill.exec.store.mock;
 
 import java.io.IOException;
 import java.net.URL;
-import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -39,10 +38,11 @@ import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.AbstractStoragePlugin;
 import org.apache.drill.exec.store.SchemaConfig;
+import org.apache.drill.exec.store.mock.MockTableDef.MockScanEntry;
+import org.apache.drill.exec.store.mock.MockTableDef.MockTableSelection;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
@@ -64,10 +64,11 @@ public class MockStorageEngine extends AbstractStoragePlugin {
   public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns)
       throws IOException {
 
-    List<MockTableDef.MockScanEntry> readEntries = selection.getListWith(new ObjectMapper(),
-        new TypeReference<ArrayList<MockTableDef.MockScanEntry>>() {
-        });
-
+    MockTableSelection tableSelection = selection.getWith(
+      new ObjectMapper(),
+      MockTableSelection.class
+    );
+    List<MockScanEntry> readEntries = tableSelection.getEntries();
     assert ! readEntries.isEmpty();
     return new MockGroupScanPOP(null, readEntries);
   }
@@ -161,6 +162,7 @@ public class MockStorageEngine extends AbstractStoragePlugin {
       } catch (IOException e) {
         throw new IllegalArgumentException("Unable to read mock table definition file: " + name, e);
       }
+
       return new DynamicDrillTable(engine, this.name, mockTableDefn.getEntries());
     }
 
@@ -177,10 +179,9 @@ public class MockStorageEngine extends AbstractStoragePlugin {
       if (unit == null) { }
       else if (unit.equalsIgnoreCase("K")) { n *= 1000; }
       else if (unit.equalsIgnoreCase("M")) { n *= 1_000_000; }
-      MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(n, true, 0, 1, null);
-      List<MockTableDef.MockScanEntry> list = new ArrayList<>();
-      list.add(entry);
-      return new DynamicDrillTable(engine, this.name, list);
+      MockScanEntry entry = new MockTableDef.MockScanEntry(n, true, 0, 1, null);
+      MockTableSelection entries = new MockTableSelection(ImmutableList.<MockScanEntry>of(entry));
+      return new DynamicDrillTable(engine, this.name, entries);
     }
 
     @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java
index 1b4af74fa9..5a9a6f59f5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java
@@ -31,6 +31,7 @@ import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 
 /**
  * Structure of a mock table definition file. Yes, using Jackson deserialization to parse
@@ -84,6 +85,29 @@ public class MockTableDef {
     }
   }
 
+  /**
+   * A tiny wrapper class to add required DrillTableSelection behaviour to
+   * the entries list.
+   */
+  public static class MockTableSelection implements DrillTableSelection {
+    private final List<MockScanEntry> entries;
+
+    @JsonCreator
+    public MockTableSelection(@JsonProperty("entries") List<MockScanEntry> entries) {
+      this.entries = entries;
+    }
+
+    @JsonIgnore
+    @Override
+    public String digest() {
+      return entries.toString();
+    }
+
+    public List<MockScanEntry> getEntries() {
+      return entries;
+    }
+  }
+
   /**
    * Meta-data description of the columns we wish to create during a simulated
    * scan.
@@ -189,10 +213,10 @@ public class MockTableDef {
   }
 
   private String descrip;
-  List<MockTableDef.MockScanEntry> entries;
+  MockTableSelection entries;
 
   public MockTableDef(@JsonProperty("descrip") final String descrip,
-                      @JsonProperty("entries") final List<MockTableDef.MockScanEntry> entries) {
+                      @JsonProperty("entries") final MockTableSelection entries) {
     this.descrip = descrip;
     this.entries = entries;
   }
@@ -211,5 +235,5 @@ public class MockTableDef {
    * @return
    */
 
-  public List<MockTableDef.MockScanEntry> getEntries() { return entries; }
+  public MockTableSelection getEntries() { return entries; }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java
index 7528d99b1c..da1a34ecf4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java
@@ -23,6 +23,7 @@ import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.schema.TranslatableTable;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.planner.logical.DynamicDrillTable;
 import org.apache.drill.exec.store.StoragePlugin;
 import org.apache.drill.exec.util.Utilities;
@@ -36,7 +37,7 @@ public class PluginDrillTable extends DynamicDrillTable implements TranslatableT
   private final Convention convention;
 
   public PluginDrillTable(StoragePlugin plugin, String storageEngineName,
-      String userName, Object selection, Convention convention) {
+      String userName, DrillTableSelection selection, Convention convention) {
     super(plugin, storageEngineName, userName, selection);
     this.convention = convention;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java
index dbe58917e0..07df14b56e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java
@@ -21,6 +21,7 @@ import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.schema.Schema.TableType;
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.store.RecordDataType;
 import org.apache.drill.exec.store.StoragePlugin;
 import org.apache.drill.exec.util.ImpersonationUtil;
@@ -34,7 +35,7 @@ public class StaticDrillTable extends DrillTable {
 
   private final RecordDataType dataType;
 
-  public StaticDrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, Object selection, RecordDataType dataType) {
+  public StaticDrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, DrillTableSelection selection, RecordDataType dataType) {
     super(storageEngineName, plugin, tableType, ImpersonationUtil.getProcessUserName(), selection);
     this.dataType = dataType;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java
index 5bddc8d734..35a4b64cff 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java
@@ -21,6 +21,7 @@ import java.util.Iterator;
 
 import org.apache.drill.exec.alias.AliasTarget;
 import org.apache.drill.exec.ops.ExecutorFragmentContext;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.store.sys.OptionIterator.OptionValueWrapper;
 
 /**
@@ -31,7 +32,7 @@ import org.apache.drill.exec.store.sys.OptionIterator.OptionValueWrapper;
  *   PROFILES and PROFILES_JSON are stored in local / distributed storage.
  * </p>
  */
-public enum SystemTable {
+public enum SystemTable implements DrillTableSelection {
   OPTIONS_OLD("options_old", false, OptionValueWrapper.class) {
     @Deprecated
     @Override
@@ -164,4 +165,9 @@ public enum SystemTable {
   public Class<?> getPojoClass() {
     return pojoClass;
   }
+
+  @Override
+  public String digest() {
+    return toString();
+  }
 }
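
A quick illustration of what the enum change yields. This is a minimal
sketch (the class name is illustrative) and assumes SystemTable does not
override toString() outside the hunks shown; for a plain enum, toString()
defaults to the constant name:

    import org.apache.drill.exec.store.sys.SystemTable;

    public class SystemTableDigestSketch {
      public static void main(String[] args) {
        // digest() delegates to toString(), so each system table gets a
        // distinct, stable digest with no extra state. Prints
        // "OPTIONS_OLD" under the no-override assumption above.
        System.out.println(SystemTable.OPTIONS_OLD.digest());
      }
    }
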
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java
index 6628069794..f5a92733c9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java
@@ -20,12 +20,13 @@ package org.apache.drill.exec.store.enumerable.plan;
 import org.apache.calcite.plan.RelOptTable;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.schema.TranslatableTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
 import org.apache.drill.exec.planner.logical.DynamicDrillTable;
 import org.apache.drill.exec.store.StoragePlugin;
 
 public class EnumMockTable extends DynamicDrillTable implements TranslatableTable {
 
-  public EnumMockTable(StoragePlugin plugin, String storageEngineName, String userName, Object selection) {
+  public EnumMockTable(StoragePlugin plugin, String storageEngineName, String userName, DrillTableSelection selection) {
     super(plugin, storageEngineName, userName, selection);
   }
 
diff --git a/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json b/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json
index acae9e730e..2db5634705 100644
--- a/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json
+++ b/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json
@@ -18,30 +18,32 @@
     "op" : "scan",
     "@id" : 1,
     "storageengine" : "mock",
-    "selection" : [ {
-      "records" : 10,
-      "types" : [ {
-        "name" : "tinyint_val",
-        "type" : "TINYINT",
-        "mode" : "REQUIRED"
-      }, {
-        "name" : "smallint_val",
-        "type" : "SMALLINT",
-        "mode" : "REQUIRED"
-      }, {
-        "name" : "int_val",
-        "type" : "INT",
-        "mode" : "REQUIRED"
-      }, {
-        "name" : "bigint_val",
-        "type" : "BIGINT",
-        "mode" : "REQUIRED"
-      }, {
-        "name" : "uint8_val",
-        "type" : "UINT8",
-        "mode" : "REQUIRED"
+    "selection" : {
+      "entries": [ {
+        "records" : 10,
+        "types" : [ {
+          "name" : "tinyint_val",
+          "type" : "TINYINT",
+          "mode" : "REQUIRED"
+        }, {
+          "name" : "smallint_val",
+          "type" : "SMALLINT",
+          "mode" : "REQUIRED"
+        }, {
+          "name" : "int_val",
+          "type" : "INT",
+          "mode" : "REQUIRED"
+        }, {
+          "name" : "bigint_val",
+          "type" : "BIGINT",
+          "mode" : "REQUIRED"
+        }, {
+          "name" : "uint8_val",
+          "type" : "UINT8",
+          "mode" : "REQUIRED"
+        } ]
       } ]
-    } ]
+    }
   }, {
     "op" : "project",
     "@id" : 2,
@@ -84,4 +86,4 @@
     "target" : null,
     "storageEngine" : "--SCREEN--"
   } ]
-}
\ No newline at end of file
+}
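
The resource change above follows from the new @JsonCreator: "selection"
is now an object keyed by "entries" rather than a bare array. A minimal
round-trip sketch, assuming Jackson databind on the classpath; the class
name and the empty entries list are illustrative only:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.drill.exec.store.mock.MockTableDef;

    public class SelectionShapeSketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // The object-with-entries shape introduced above; an empty list
        // sidesteps MockScanEntry construction details.
        MockTableDef.MockTableSelection selection = mapper.readValue(
            "{\"entries\": []}", MockTableDef.MockTableSelection.class);
        System.out.println(selection.digest()); // prints "[]"
      }
    }
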
diff --git a/exec/java-exec/src/test/resources/scan_screen_logical.json b/exec/java-exec/src/test/resources/scan_screen_logical.json
index 1cf380049c..97f1ab9f95 100644
--- a/exec/java-exec/src/test/resources/scan_screen_logical.json
+++ b/exec/java-exec/src/test/resources/scan_screen_logical.json
@@ -18,14 +18,16 @@
     "op" : "scan",
     "memo" : "initial_scan",
     "storageengine" : "mock",
-    "selection" : [ {
-      "records" : 100,
-      "types" : [ {
-        "name" : "superhero_name",
-        "type" : "VARCHAR",
-        "mode" : "REQUIRED"
+    "selection" : {
+      "entries": [ {
+        "records" : 100,
+        "types" : [ {
+          "name" : "superhero_name",
+          "type" : "VARCHAR",
+          "mode" : "REQUIRED"
+        } ]
       } ]
-    } ]
+    }
   }, {
     "@id" : "2",
     "input" : 1,
@@ -35,4 +37,4 @@
       "file" : "console:///stdout"
     }
   } ]
-}
\ No newline at end of file
+}
diff --git a/pom.xml b/pom.xml
index f32c48bb9f..65dd0829f2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -50,7 +50,7 @@
     <slf4j.version>1.7.26</slf4j.version>
     <shaded.guava.version>28.2-jre</shaded.guava.version>
     <guava.version>30.1.1-jre</guava.version>
-    <forkCount>2</forkCount>
+    <forkCount>1</forkCount>
     <parquet.version>1.12.2</parquet.version>
     <parquet.format.version>2.8.0</parquet.format.version>
     <!--


[drill] 09/10: DRILL-8264: remove xalan dependency

Posted by dz...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch 1.20
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 5218a74fc899caf72b6ea57c2765aac05e7519b7
Author: PJ Fanning <pj...@users.noreply.github.com>
AuthorDate: Tue Jul 19 21:26:30 2022 +0100

    DRILL-8264: remove xalan dependency
---
 exec/java-exec/pom.xml |  4 ----
 exec/jdbc/pom.xml      |  6 +-----
 pom.xml                | 39 +++++++++------------------------------
 3 files changed, 10 insertions(+), 39 deletions(-)

diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 125edd30dd..74974047a6 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -139,10 +139,6 @@
       <groupId>xerces</groupId>
       <artifactId>xercesImpl</artifactId>
     </dependency>
-    <dependency>
-      <groupId>xalan</groupId>
-      <artifactId>xalan</artifactId>
-    </dependency>
     <dependency>
       <groupId>com.sun.codemodel</groupId>
       <artifactId>codemodel</artifactId>
diff --git a/exec/jdbc/pom.xml b/exec/jdbc/pom.xml
index 389b36ba33..6b898c6f34 100644
--- a/exec/jdbc/pom.xml
+++ b/exec/jdbc/pom.xml
@@ -72,15 +72,11 @@
       <artifactId>sqlline</artifactId>
       <scope>test</scope>
     </dependency>
-    <!-- Specify xalan and xerces versions to avoid setXIncludeAware error. -->
+    <!-- Specify the xerces version to avoid the setXIncludeAware error. -->
     <dependency>
       <groupId>xerces</groupId>
       <artifactId>xercesImpl</artifactId>
     </dependency>
-    <dependency>
-      <groupId>xalan</groupId>
-      <artifactId>xalan</artifactId>
-    </dependency>
     <dependency>
       <groupId>javax.validation</groupId>
       <artifactId>validation-api</artifactId>
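
For context on the comment kept above: setXIncludeAware is the JAXP call
that throws UnsupportedOperationException on parser implementations that
do not support it, which is the error the pinned xerces version avoids.
An illustrative sketch, not taken from the Drill sources:

    import javax.xml.parsers.DocumentBuilderFactory;

    public class XIncludeAwareSketch {
      public static void main(String[] args) {
        // On an unsupporting JAXP implementation this call throws
        // UnsupportedOperationException; with xercesImpl pinned it
        // succeeds.
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setXIncludeAware(true);
        System.out.println("XInclude aware: " + factory.isXIncludeAware());
      }
    }
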
diff --git a/pom.xml b/pom.xml
index 65dd0829f2..30c81e0283 100644
--- a/pom.xml
+++ b/pom.xml
@@ -129,7 +129,6 @@
     <testcontainers.version>1.16.3</testcontainers.version>
     <typesafe.config.version>1.0.0</typesafe.config.version>
     <commons.codec.version>1.14</commons.codec.version>
-    <xalan.version>2.7.2</xalan.version>
     <xerces.version>2.12.2</xerces.version>
     <commons.configuration.version>1.10</commons.configuration.version>
     <commons.beanutils.version>1.9.4</commons.beanutils.version>
@@ -1973,11 +1972,6 @@
         <artifactId>xercesImpl</artifactId>
         <version>${xerces.version}</version>
       </dependency>
-      <dependency>
-        <groupId>xalan</groupId>
-        <artifactId>xalan</artifactId>
-        <version>${xalan.version}</version>
-      </dependency>
       <dependency>
         <groupId>commons-configuration</groupId>
         <artifactId>commons-configuration</artifactId>
@@ -2761,11 +2755,6 @@
             <artifactId>xercesImpl</artifactId>
             <version>${xerces.version}</version>
           </dependency>
-          <dependency>
-            <groupId>xalan</groupId>
-            <artifactId>xalan</artifactId>
-            <version>${xalan.version}</version>
-          </dependency>
           <dependency>
             <groupId>net.sf.jpam</groupId>
             <artifactId>jpam</artifactId>
@@ -3135,20 +3124,15 @@
             <version>${xerces.version}</version>
           </dependency>
           <dependency>
-            <groupId>xalan</groupId>
-            <artifactId>xalan</artifactId>
-            <version>${xalan.version}</version>
-          </dependency>
-          <dependency>
-          	<groupId>org.apache.parquet</groupId>
-          	<artifactId>parquet-hadoop</artifactId>
-          	<version>${parquet.version}</version>
-          	<exclusions>
-          		<exclusion>
-          			<groupId>org.xerial.snappy</groupId>
-          			<artifactId>snappy-java</artifactId>
-          		</exclusion>
-          	</exclusions>
+            <groupId>org.apache.parquet</groupId>
+            <artifactId>parquet-hadoop</artifactId>
+            <version>${parquet.version}</version>
+            <exclusions>
+              <exclusion>
+                <groupId>org.xerial.snappy</groupId>
+                <artifactId>snappy-java</artifactId>
+              </exclusion>
+            </exclusions>
           </dependency>
           <!-- Test Dependencies -->
           <dependency>
@@ -4030,11 +4014,6 @@
             <artifactId>xercesImpl</artifactId>
             <version>${xerces.version}</version>
           </dependency>
-          <dependency>
-            <groupId>xalan</groupId>
-            <artifactId>xalan</artifactId>
-            <version>${xalan.version}</version>
-          </dependency>
           <dependency>
             <groupId>org.apache.parquet</groupId>
             <artifactId>parquet-hadoop</artifactId>