Posted to dev@drill.apache.org by GitBox <gi...@apache.org> on 2018/11/26 16:04:22 UTC

[GitHub] asfgit closed pull request #1542: DRILL-6850: JDBC integration tests failures

asfgit closed pull request #1542: DRILL-6850: JDBC integration tests failures
URL: https://github.com/apache/drill/pull/1542
 
 
   

This is a PR merged from a forked repository. Because GitHub hides the
original diff of a foreign (fork) pull request once it is merged, the
diff is reproduced below for the sake of provenance.
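In summary, the patch makes JDBC table-name matching configurable:
JdbcStorageConfig gains a caseInsensitiveTableNames option,
CapitalizingJdbcSchema tries upper- and lowercase lookups only when that
option is enabled, and the shared "default" workspace name is centralized
as SchemaFactory.DEFAULT_WS_NAME. A plugin opting into case-insensitive
lookups would be configured roughly like the Derby entry from the test
bootstrap below (a sketch; host, port, database name, and credentials are
placeholders):

    derby : {
      type    : "jdbc",
      driver  : "org.apache.derby.jdbc.ClientDriver",
      url     : "jdbc:derby://localhost:1527/memory:drill_derby_test;user=root;password=root",
      caseInsensitiveTableNames: true,
      enabled : true
    }

With this set, a query such as "select * from derby.drill_derby_test.PeRsOn"
resolves the table regardless of identifier case, as the new
testCaseInsensitiveTableNames test exercises.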

diff --git a/contrib/pom.xml b/contrib/pom.xml
index 796e79bdf59..59e12da5926 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -31,6 +31,10 @@
   <name>contrib/Parent Pom</name>
   <packaging>pom</packaging>
 
+  <properties>
+    <skipTests>false</skipTests>
+  </properties>
+
   <dependencies>
   </dependencies>
 
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java
index f857ec62611..abb867618e9 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java
@@ -18,6 +18,8 @@
 package org.apache.drill.exec.store.hive.schema;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
@@ -45,9 +47,6 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.thrift.TException;
 
-import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
-import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
-
 public class HiveSchemaFactory extends AbstractSchemaFactory {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HiveSchemaFactory.class);
 
@@ -137,10 +136,10 @@ public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) throws
     private HiveDatabaseSchema defaultSchema;
 
     HiveSchema(final SchemaConfig schemaConfig, final DrillHiveMetaStoreClient mClient, final String name) {
-      super(ImmutableList.<String>of(), name);
+      super(Collections.emptyList(), name);
       this.schemaConfig = schemaConfig;
       this.mClient = mClient;
-      getSubSchema("default");
+      getSubSchema(DEFAULT_WS_NAME);
     }
 
     @Override
@@ -152,7 +151,7 @@ public AbstractSchema getSubSchema(String name) {
           return null;
         }
         HiveDatabaseSchema schema = getSubSchemaKnownExists(name);
-        if (name.equals("default")) {
+        if (DEFAULT_WS_NAME.equals(name)) {
           this.defaultSchema = schema;
         }
         return schema;
@@ -181,8 +180,8 @@ public boolean showInInformationSchema() {
     public Set<String> getSubSchemaNames() {
       try {
         List<String> dbs = mClient.getDatabases(schemaConfig.getIgnoreAuthErrors());
-        return Sets.newHashSet(dbs);
-      } catch (final TException e) {
+        return new HashSet<>(dbs);
+      } catch (TException e) {
         logger.warn("Failure while getting Hive database list.", e);
       }
       return super.getSubSchemaNames();
@@ -227,11 +226,11 @@ DrillTable getDrillTable(String dbName, String t) {
 
     HiveReadEntry getSelectionBaseOnName(String dbName, String t) {
       if (dbName == null) {
-        dbName = "default";
+        dbName = DEFAULT_WS_NAME;
       }
-      try{
+      try {
         return mClient.getHiveReadEntry(dbName, t, schemaConfig.getIgnoreAuthErrors());
-      }catch(final TException e) {
+      } catch (TException e) {
         logger.warn("Exception occurred while trying to read table. {}.{}", dbName, t, e.getCause());
         return null;
       }
diff --git a/contrib/storage-jdbc/pom.xml b/contrib/storage-jdbc/pom.xml
index 7de89f08afe..6ebcfa5d64b 100755
--- a/contrib/storage-jdbc/pom.xml
+++ b/contrib/storage-jdbc/pom.xml
@@ -31,10 +31,9 @@
   <name>contrib/jdbc-storage-plugin</name>
 
   <properties>
-    <mysql.connector.version>5.1.36</mysql.connector.version>
+    <mysql.connector.version>8.0.13</mysql.connector.version>
     <derby.database.name>drill_derby_test</derby.database.name>
     <mysql.database.name>drill_mysql_test</mysql.database.name>
-    <skipTests>false</skipTests>
   </properties>
 
   <dependencies>
@@ -62,13 +61,13 @@
     <dependency>
       <groupId>org.apache.derby</groupId>
       <artifactId>derbyclient</artifactId>
-      <version>10.11.1.1</version>
+      <version>10.14.2.0</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.derby</groupId>
       <artifactId>derbynet</artifactId>
-      <version>10.11.1.1</version>
+      <version>10.14.2.0</version>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -104,7 +103,7 @@
         <!-- Because the JDBC tests are somewhat heavyweight, we only run them in the 'verify' phase -->
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-failsafe-plugin</artifactId>
-        <version>2.18.1</version>
+        <version>2.22.1</version>
         <configuration>
           <forkCount combine.self="override">1</forkCount>
           <systemPropertyVariables>
@@ -112,6 +111,7 @@
             <mysql.port>${mysql.reserved.port}</mysql.port>
             <mysql.name>${mysql.database.name}</mysql.name>
           </systemPropertyVariables>
+          <skipITs>${skipTests}</skipITs>
           <includes>
             <include>**/*IT.java</include>
           </includes>
@@ -120,6 +120,15 @@
           <execution>
             <id>run-IT-Tests</id>
             <phase>integration-test</phase>
+            <goals>
+              <goal>integration-test</goal>
+            </goals>
+          </execution>
+          <execution>
+            <phase>verify</phase>
+            <goals>
+              <goal>verify</goal>
+            </goals>
           </execution>
         </executions>
       </plugin>
@@ -127,7 +136,7 @@
         <!-- Allows us to reserve ports for external servers that we will launch  -->
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>build-helper-maven-plugin</artifactId>
-        <version>1.9.1</version>
+        <version>3.0.0</version>
         <executions>
           <execution>
             <id>reserve-network-port</id>
@@ -146,7 +155,7 @@
       </plugin>
       <plugin>
         <artifactId>maven-dependency-plugin</artifactId>
-        <version>2.8</version>
+        <version>3.1.1</version>
         <executions>
           <execution>
             <goals>
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStorageConfig.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStorageConfig.java
index 3c3ce3c4421..1c607d606cd 100755
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStorageConfig.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStorageConfig.java
@@ -34,18 +34,21 @@
   private final String url;
   private final String username;
   private final String password;
+  private final boolean caseInsensitiveTableNames;
 
   @JsonCreator
   public JdbcStorageConfig(
       @JsonProperty("driver") String driver,
       @JsonProperty("url") String url,
       @JsonProperty("username") String username,
-      @JsonProperty("password") String password) {
+      @JsonProperty("password") String password,
+      @JsonProperty("caseInsensitiveTableNames") boolean caseInsensitiveTableNames) {
     super();
     this.driver = driver;
     this.url = url;
     this.username = username;
     this.password = password;
+    this.caseInsensitiveTableNames = caseInsensitiveTableNames;
   }
 
   public String getDriver() {
@@ -64,6 +67,11 @@ public String getPassword() {
     return password;
   }
 
+  @JsonProperty("caseInsensitiveTableNames")
+  public boolean areTableNamesCaseInsensitive() {
+    return caseInsensitiveTableNames;
+  }
+
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -72,6 +80,7 @@ public int hashCode() {
     result = prime * result + ((password == null) ? 0 : password.hashCode());
     result = prime * result + ((url == null) ? 0 : url.hashCode());
     result = prime * result + ((username == null) ? 0 : username.hashCode());
+    result = prime * result + (caseInsensitiveTableNames ? 1231 : 1237);
     return result;
   }
 
@@ -87,6 +96,9 @@ public boolean equals(Object obj) {
       return false;
     }
     JdbcStorageConfig other = (JdbcStorageConfig) obj;
+    if (caseInsensitiveTableNames != other.caseInsensitiveTableNames) {
+      return false;
+    }
     if (driver == null) {
       if (other.driver != null) {
         return false;
@@ -117,6 +129,4 @@ public boolean equals(Object obj) {
     }
     return true;
   }
-
-
 }
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java
index b0338cb85b5..ebff37173f8 100755
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcStoragePlugin.java
@@ -22,6 +22,8 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -59,13 +61,12 @@
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.AbstractStoragePlugin;
 import org.apache.drill.exec.store.SchemaConfig;
+import org.apache.drill.exec.store.SchemaFactory;
 import org.apache.drill.exec.store.jdbc.DrillJdbcRuleBase.DrillJdbcFilterRule;
 import org.apache.drill.exec.store.jdbc.DrillJdbcRuleBase.DrillJdbcProjectRule;
 
 import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
-import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableSet;
-import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
 
 public class JdbcStoragePlugin extends AbstractStoragePlugin {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JdbcStoragePlugin.class);
@@ -157,10 +158,10 @@ public JdbcStoragePlugin getPlugin() {
   }
 
   /**
-   * Returns whether a condition is supported by {@link JdbcJoin}.
+   * Returns whether a condition is supported by {@link org.apache.calcite.adapter.jdbc.JdbcRules.JdbcJoin}.
    *
    * <p>Corresponds to the capabilities of
-   * {@link SqlImplementor#convertConditionToSqlNode}.
+   * {@link org.apache.calcite.rel.rel2sql.SqlImplementor#convertConditionToSqlNode}.
    *
    * @param node Condition
    * @return Whether condition is supported
@@ -234,7 +235,7 @@ public RelNode convert(RelNode in) {
 
   private class CapitalizingJdbcSchema extends AbstractSchema {
 
-    final Map<String, CapitalizingJdbcSchema> schemaMap = Maps.newHashMap();
+    private final Map<String, CapitalizingJdbcSchema> schemaMap = new HashMap<>();
     private final JdbcSchema inner;
 
     public CapitalizingJdbcSchema(List<String> parentSchemaPath, String name, DataSource dataSource,
@@ -291,19 +292,33 @@ public Table getTable(String name) {
       if (table != null) {
         return table;
       }
-      return inner.getTable(name.toUpperCase());
+      if (!areTableNamesCaseSensitive()) {
+        // Oracle and H2 change unquoted identifiers to uppercase.
+        table = inner.getTable(name.toUpperCase());
+        if (table != null) {
+          return table;
+        }
+        // Postgres changes unquoted identifiers to lowercase.
+        return inner.getTable(name.toLowerCase());
+      }
 
+      // no table was found.
+      return null;
     }
 
+    @Override
+    public boolean areTableNamesCaseSensitive() {
+      return !config.areTableNamesCaseInsensitive();
+    }
   }
 
   private class JdbcCatalogSchema extends AbstractSchema {
 
-    private final Map<String, CapitalizingJdbcSchema> schemaMap = Maps.newHashMap();
+    private final Map<String, CapitalizingJdbcSchema> schemaMap = new HashMap<>();
     private final CapitalizingJdbcSchema defaultSchema;
 
     public JdbcCatalogSchema(String name) {
-      super(ImmutableList.<String> of(), name);
+      super(Collections.emptyList(), name);
 
       try (Connection con = source.getConnection();
            ResultSet set = con.getMetaData().getCatalogs()) {
@@ -311,7 +326,7 @@ public JdbcCatalogSchema(String name) {
           final String catalogName = set.getString(1);
           CapitalizingJdbcSchema schema = new CapitalizingJdbcSchema(
               getSchemaPath(), catalogName, source, dialect, convention, catalogName, null);
-          schemaMap.put(catalogName, schema);
+          schemaMap.put(schema.getName(), schema);
         }
       } catch (SQLException e) {
         logger.warn("Failure while attempting to load JDBC schema.", e);
@@ -325,7 +340,7 @@ public JdbcCatalogSchema(String name) {
 
         if (!schemasAdded) {
          // there were no schemas, so just create a default one (the jdbc system doesn't support catalogs/schemas).
-          schemaMap.put("default", new CapitalizingJdbcSchema(ImmutableList.<String> of(), name, source, dialect,
+          schemaMap.put(SchemaFactory.DEFAULT_WS_NAME, new CapitalizingJdbcSchema(Collections.emptyList(), name, source, dialect,
               convention, null, null));
         }
       } else {
@@ -334,8 +349,6 @@ public JdbcCatalogSchema(String name) {
       }
 
       defaultSchema = schemaMap.values().iterator().next();
-
-
     }
 
     void setHolder(SchemaPlus plusOfThis) {
@@ -360,7 +373,7 @@ private boolean addSchemas() {
                 convention, catalogName, schemaName);
 
             // if a catalog schema doesn't exist, we'll add this at the top level.
-            schemaMap.put(schemaName, schema);
+            schemaMap.put(schema.getName(), schema);
           } else {
             CapitalizingJdbcSchema schema = new CapitalizingJdbcSchema(parentSchema.getSchemaPath(), schemaName,
                 source, dialect,
@@ -404,14 +417,9 @@ public Table getTable(String name) {
 
       if (schema != null) {
         try {
-          Table t = schema.getTable(name);
-          if (t != null) {
-            return t;
-          }
-          return schema.getTable(name.toUpperCase());
+          return schema.getTable(name);
         } catch (RuntimeException e) {
           logger.warn("Failure while attempting to read table '{}' from JDBC source.", name, e);
-
         }
       }
 
@@ -424,6 +432,10 @@ public Table getTable(String name) {
       return defaultSchema.getTableNames();
     }
 
+    @Override
+    public boolean areTableNamesCaseSensitive() {
+      return defaultSchema.areTableNamesCaseSensitive();
+    }
   }
 
   @Override
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithDerbyIT.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithDerbyIT.java
index e2e408956a8..168b5f37a19 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithDerbyIT.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithDerbyIT.java
@@ -20,11 +20,15 @@
 import org.apache.drill.categories.JdbcStorageTest;
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.exec.expr.fn.impl.DateUtility;
-import org.apache.drill.exec.proto.UserBitShared;
 
+import org.apache.drill.exec.util.StoragePluginTestUtils;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.math.BigDecimal;
+import java.nio.file.Paths;
+
 import static org.junit.Assert.assertEquals;
 
 /**
@@ -33,107 +37,132 @@
 @Category(JdbcStorageTest.class)
 public class TestJdbcPluginWithDerbyIT extends PlanTestBase {
 
+  private static final String TABLE_PATH = "jdbcmulti/";
+  private static final String TABLE_NAME = String.format("%s.`%s`", StoragePluginTestUtils.DFS_PLUGIN_NAME, TABLE_PATH);
+
+  @BeforeClass
+  public static void copyData() {
+    dirTestWatcher.copyResourceToRoot(Paths.get(TABLE_PATH));
+  }
+
   @Test
   public void testCrossSourceMultiFragmentJoin() throws Exception {
-    testNoResult("USE derby");
     testNoResult("SET `planner.slice_target` = 1");
-    String query = "select x.person_id, y.salary from DRILL_DERBY_TEST.PERSON x "
-        + "join dfs.`${WORKING_PATH}/src/test/resources/jdbcmulti/` y on x.person_id = y.person_id ";
-    test(query);
+    test("select x.person_id, y.salary from derby.drill_derby_test.person x "
+        + "join %s y on x.person_id = y.person_id ", TABLE_NAME);
   }
 
   @Test
   public void validateResult() throws Exception {
-
     // Skip date, time, and timestamp types since derby mangles these due to improper timezone support.
     testBuilder()
-            .sqlQuery(
-                    "select PERSON_ID, FIRST_NAME, LAST_NAME, ADDRESS, CITY, STATE, ZIP, JSON, BIGINT_FIELD, SMALLINT_FIELD, " +
-                            "NUMERIC_FIELD, BOOLEAN_FIELD, DOUBLE_FIELD, FLOAT_FIELD, REAL_FIELD, TIME_FIELD, TIMESTAMP_FIELD, " +
-                            "DATE_FIELD, CLOB_FIELD from derby.DRILL_DERBY_TEST.PERSON")
-            .ordered()
-            .baselineColumns("PERSON_ID", "FIRST_NAME", "LAST_NAME", "ADDRESS", "CITY", "STATE", "ZIP", "JSON",
-                    "BIGINT_FIELD", "SMALLINT_FIELD", "NUMERIC_FIELD", "BOOLEAN_FIELD", "DOUBLE_FIELD",
-                    "FLOAT_FIELD", "REAL_FIELD", "TIME_FIELD", "TIMESTAMP_FIELD", "DATE_FIELD", "CLOB_FIELD")
-            .baselineValues(1, "first_name_1", "last_name_1", "1401 John F Kennedy Blvd",   "Philadelphia",     "PA",
-                            19107, "{ a : 5, b : 6 }",            123456L,         1, 10.01, false, 1.0, 1.1, 111.00,
-                            DateUtility.parseLocalTime("13:00:01.0"), DateUtility.parseLocalDateTime("2012-02-29 13:00:01.0"), DateUtility.parseLocalDate("2012-02-29"), "some clob data 1")
-            .baselineValues(2, "first_name_2", "last_name_2", "One Ferry Building",         "San Francisco",    "CA",
-                            94111, "{ foo : \"abc\" }",            95949L,         2, 20.02, true, 2.0, 2.1, 222.00,
-                            DateUtility.parseLocalTime("23:59:59.0"),  DateUtility.parseLocalDateTime("1999-09-09 23:59:59.0"), DateUtility.parseLocalDate("1999-09-09"), "some more clob data")
-            .baselineValues(3, "first_name_3", "last_name_3", "176 Bowery",                 "New York",         "NY",
-                            10012, "{ z : [ 1, 2, 3 ] }",           45456L,        3, 30.04, true, 3.0, 3.1, 333.00,
-                            DateUtility.parseLocalTime("11:34:21.0"),  DateUtility.parseLocalDateTime("2011-10-30 11:34:21.0"), DateUtility.parseLocalDate("2011-10-30"), "clobber")
-            .baselineValues(4, null, null, "2 15th St NW", "Washington", "DC", 20007, "{ z : { a : 1, b : 2, c : 3 } " +
-                    "}", -67L, 4, 40.04, false, 4.0, 4.1, 444.00, DateUtility.parseLocalTime("16:00:01.0"), DateUtility.parseLocalDateTime("2015-06-01 16:00:01.0"),  DateUtility.parseLocalDate("2015-06-01"), "xxx")
-            .baselineValues(5, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
-                            null, null, null, null)
-            .build().run();
+        .sqlQuery(
+            "select person_id, first_name, last_name, address, city, state, zip, json, bigint_field, smallint_field, " +
+                "numeric_field, boolean_field, double_field, float_field, real_field, time_field, timestamp_field, " +
+                "date_field, clob_field from derby.`drill_derby_test`.person")
+        .ordered()
+        .baselineColumns("person_id", "first_name", "last_name", "address", "city", "state", "zip", "json",
+            "bigint_field", "smallint_field", "numeric_field", "boolean_field", "double_field", "float_field",
+            "real_field", "time_field", "timestamp_field", "date_field", "clob_field")
+        .baselineValues(1, "first_name_1", "last_name_1", "1401 John F Kennedy Blvd",   "Philadelphia",     "PA", 19107,
+            "{ a : 5, b : 6 }", 123456L, 1, new BigDecimal("10.01"), false, 1.0, 1.1, 111.00,
+            DateUtility.parseLocalTime("13:00:01.0"), DateUtility.parseLocalDateTime("2012-02-29 13:00:01.0"),
+            DateUtility.parseLocalDate("2012-02-29"), "some clob data 1")
+        .baselineValues(2, "first_name_2", "last_name_2", "One Ferry Building", "San Francisco", "CA", 94111,
+            "{ foo : \"abc\" }", 95949L, 2, new BigDecimal("20.02"), true, 2.0, 2.1, 222.00,
+            DateUtility.parseLocalTime("23:59:59.0"),  DateUtility.parseLocalDateTime("1999-09-09 23:59:59.0"),
+            DateUtility.parseLocalDate("1999-09-09"), "some more clob data")
+        .baselineValues(3, "first_name_3", "last_name_3", "176 Bowery", "New York", "NY", 10012, "{ z : [ 1, 2, 3 ] }",
+            45456L, 3, new BigDecimal("30.04"), true, 3.0, 3.1, 333.00, DateUtility.parseLocalTime("11:34:21.0"),
+            DateUtility.parseLocalDateTime("2011-10-30 11:34:21.0"), DateUtility.parseLocalDate("2011-10-30"), "clobber")
+        .baselineValues(4, null, null, "2 15th St NW", "Washington", "DC", 20007, "{ z : { a : 1, b : 2, c : 3 } }",
+            -67L, 4, new BigDecimal("40.04"), false, 4.0, 4.1, 444.00, DateUtility.parseLocalTime("16:00:01.0"),
+            DateUtility.parseLocalDateTime("2015-06-01 16:00:01.0"),  DateUtility.parseLocalDate("2015-06-01"), "xxx")
+        .baselineValues(5, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
+            null, null, null, null)
+        .go();
   }
 
   @Test
   public void pushdownJoin() throws Exception {
     testNoResult("use derby");
-    String query = "select x.person_id from (select person_id from DRILL_DERBY_TEST.PERSON) x "
-            + "join (select person_id from DRILL_DERBY_TEST.PERSON) y on x.person_id = y.person_id ";
+    String query = "select x.person_id from (select person_id from derby.drill_derby_test.person) x "
+            + "join (select person_id from derby.drill_derby_test.person) y on x.person_id = y.person_id ";
     testPlanMatchingPatterns(query, new String[]{}, new String[]{"Join"});
   }
 
   @Test
   public void pushdownJoinAndFilterPushDown() throws Exception {
     final String query = "select * from \n" +
-            "derby.DRILL_DERBY_TEST.PERSON e\n" +
-            "INNER JOIN \n" +
-            "derby.DRILL_DERBY_TEST.PERSON s\n" +
-            "ON e.FIRST_NAME = s.FIRST_NAME\n" +
-            "WHERE e.LAST_NAME > 'hello'";
+        "derby.drill_derby_test.person e\n" +
+        "INNER JOIN \n" +
+        "derby.drill_derby_test.person s\n" +
+        "ON e.FIRST_NAME = s.FIRST_NAME\n" +
+        "WHERE e.LAST_NAME > 'hello'";
 
     testPlanMatchingPatterns(query, new String[] {}, new String[] { "Join", "Filter" });
   }
 
   @Test
   public void pushdownAggregation() throws Exception {
-    final String query = "select count(*) from derby.DRILL_DERBY_TEST.PERSON";
+    final String query = "select count(*) from derby.drill_derby_test.person";
     testPlanMatchingPatterns(query, new String[] {}, new String[] { "Aggregate" });
   }
 
   @Test
   public void pushdownDoubleJoinAndFilter() throws Exception {
     final String query = "select * from \n" +
-            "derby.DRILL_DERBY_TEST.PERSON e\n" +
-            "INNER JOIN \n" +
-            "derby.DRILL_DERBY_TEST.PERSON s\n" +
-            "ON e.PERSON_ID = s.PERSON_ID\n" +
-            "INNER JOIN \n" +
-            "derby.DRILL_DERBY_TEST.PERSON ed\n" +
-            "ON e.PERSON_ID = ed.PERSON_ID\n" +
-            "WHERE s.FIRST_NAME > 'abc' and ed.FIRST_NAME > 'efg'";
+        "derby.drill_derby_test.person e\n" +
+        "INNER JOIN \n" +
+        "derby.drill_derby_test.person s\n" +
+        "ON e.person_ID = s.person_ID\n" +
+        "INNER JOIN \n" +
+        "derby.drill_derby_test.person ed\n" +
+        "ON e.person_ID = ed.person_ID\n" +
+        "WHERE s.first_name > 'abc' and ed.first_name > 'efg'";
     testPlanMatchingPatterns(query, new String[] {}, new String[] { "Join", "Filter" });
   }
 
   @Test
   public void showTablesDefaultSchema() throws Exception {
-    testNoResult("use derby");
-    assertEquals(1, testRunAndPrint(UserBitShared.QueryType.SQL, "show tables like 'PERSON'"));
+    testNoResult("use derby.drill_derby_test");
+    assertEquals(1, testSql("show tables like 'PERSON'"));
+
+    // check table names case insensitivity
+    assertEquals(1, testSql("show tables like 'person'"));
   }
 
   @Test
   public void describe() throws Exception {
-    testNoResult("use derby");
-    assertEquals(19, testRunAndPrint(UserBitShared.QueryType.SQL, "describe PERSON"));
+    testNoResult("use derby.drill_derby_test");
+    assertEquals(19, testSql("describe PERSON"));
+
+    // check table names case insensitivity
+    assertEquals(19, testSql("describe person"));
   }
 
   @Test
   public void ensureDrillFunctionsAreNotPushedDown() throws Exception {
    // This should verify that we're not trying to push CONVERT_FROM into the JDBC storage plugin. If we were pushing
     // this function down, the SQL query would fail.
-    testNoResult("select CONVERT_FROM(JSON, 'JSON') from derby.DRILL_DERBY_TEST.PERSON where PERSON_ID = 4");
+    testNoResult("select CONVERT_FROM(JSON, 'JSON') from derby.drill_derby_test.person where person_ID = 4");
   }
 
   @Test
   public void pushdownFilter() throws Exception {
-    testNoResult("use derby");
-    String query = "select * from DRILL_DERBY_TEST.PERSON where PERSON_ID = 1";
+    String query = "select * from derby.drill_derby_test.person where person_ID = 1";
     testPlanMatchingPatterns(query, new String[]{}, new String[]{"Filter"});
   }
+
+  @Test
+  public void testCaseInsensitiveTableNames() throws Exception {
+    assertEquals(5, testSql("select * from derby.drill_derby_test.PeRsOn"));
+    assertEquals(5, testSql("select * from derby.drill_derby_test.PERSON"));
+    assertEquals(5, testSql("select * from derby.drill_derby_test.person"));
+  }
+
+  @Test
+  public void testJdbcStoragePluginSerDe() throws Exception {
+    testPhysicalPlanExecutionBasedOnQuery("select * from derby.drill_derby_test.PeRsOn");
+  }
 }
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
index 0f377e3e8a8..361559c9398 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
@@ -24,6 +24,10 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.math.BigDecimal;
+
+import static org.junit.Assert.assertEquals;
+
 /**
  * JDBC storage plugin tests against MySQL.
 * Note: it requires the libaio.so library on the system
@@ -35,77 +39,77 @@
   public void validateResult() throws Exception {
 
     testBuilder()
-            .sqlQuery(
-                    "select person_id, " +
-                            "first_name, last_name, address, city, state, zip, " +
-                            "bigint_field, smallint_field, numeric_field, " +
-                            "boolean_field, double_field, float_field, real_field, " +
-                            "date_field, datetime_field, year_field, time_field, " +
-                            "json, text_field, tiny_text_field, medium_text_field, long_text_field, " +
-                            "blob_field, bit_field, enum_field " +
-                    "from mysql.`drill_mysql_test`.person")
-            .ordered()
-            .baselineColumns("person_id",
-                    "first_name", "last_name", "address", "city", "state", "zip",
-                    "bigint_field", "smallint_field", "numeric_field",
-                    "boolean_field",
-                    "double_field", "float_field", "real_field",
-                    "date_field", "datetime_field", "year_field", "time_field",
-                    "json", "text_field", "tiny_text_field", "medium_text_field", "long_text_field",
-                    "blob_field", "bit_field", "enum_field")
-            .baselineValues(1,
-                    "first_name_1", "last_name_1", "1401 John F Kennedy Blvd", "Philadelphia", "PA", 19107,
-                    123456789L, 1, 10.01,
-                    false,
-                    1.0, 1.1, 1.2,
-                    DateUtility.parseLocalDate("2012-02-29"), DateUtility.parseLocalDateTime("2012-02-29 13:00:01.0"), DateUtility.parseLocalDate("2015-01-01"), DateUtility.parseLocalTime("13:00:01.0"),
-                    "{ a : 5, b : 6 }",
-                    "It is a long established fact that a reader will be distracted by the readable content of a page when looking at its layout",
-                    "xxx",
-                    "a medium piece of text",
-                    "a longer piece of text this is going on.....",
-                    "this is a test".getBytes(),
-                    true, "XXX")
-            .baselineValues(2,
-                    "first_name_2", "last_name_2", "One Ferry Building", "San Francisco", "CA", 94111,
-                    45456767L, 3, 30.04,
-                    true,
-                    3.0, 3.1, 3.2,
-                    DateUtility.parseLocalDate("2011-10-30"), DateUtility.parseLocalDateTime("2011-10-30 11:34:21.0"), DateUtility.parseLocalDate("2015-01-01"), DateUtility.parseLocalTime("11:34:21.0"),
-                    "{ z : [ 1, 2, 3 ] }",
-                    "It is a long established fact that a reader will be distracted by the readable content of a page when looking at its layout",
-                    "abc",
-                    "a medium piece of text 2",
-                    "somewhat more text",
-                    "this is a test 2".getBytes(),
-                    false, "YYY")
-            .baselineValues(3,
-                    "first_name_3", "last_name_3", "176 Bowery", "New York", "NY", 10012,
-                    123090L, -3, 55.12,
-                    false,
-                    5.0, 5.1, 5.55,
-                    DateUtility.parseLocalDate("2015-06-01"), DateUtility.parseLocalDateTime("2015-09-22 15:46:10.0"), DateUtility.parseLocalDate("1901-01-01"), DateUtility.parseLocalTime("16:00:01.0"),
-                    "{ [ a, b, c ] }",
-                    "Neque porro quisquam est qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit",
-                    "abc",
-                    "a medium piece of text 3",
-                    "somewhat more text",
-                    "this is a test 3".getBytes(),
-                    true, "ZZZ")
-            .baselineValues(5,
-                    null, null, null, null, null, null,
-                    null, null, null,
-                    null,
-                    null, null, null,
-                    null, null, null, null,
-                    null,
-                    null,
-                    null,
-                    null,
-                    null,
-                    null,
-                    null, "XXX")
-                  .build().run();
+        .sqlQuery(
+            "select person_id, " +
+                "first_name, last_name, address, city, state, zip, " +
+                "bigint_field, smallint_field, numeric_field, " +
+                "boolean_field, double_field, float_field, real_field, " +
+                "date_field, datetime_field, year_field, time_field, " +
+                "json, text_field, tiny_text_field, medium_text_field, long_text_field, " +
+                "blob_field, bit_field, enum_field " +
+            "from mysql.`drill_mysql_test`.person")
+        .ordered()
+        .baselineColumns("person_id",
+            "first_name", "last_name", "address", "city", "state", "zip",
+            "bigint_field", "smallint_field", "numeric_field",
+            "boolean_field",
+            "double_field", "float_field", "real_field",
+            "date_field", "datetime_field", "year_field", "time_field",
+            "json", "text_field", "tiny_text_field", "medium_text_field", "long_text_field",
+            "blob_field", "bit_field", "enum_field")
+        .baselineValues(1,
+            "first_name_1", "last_name_1", "1401 John F Kennedy Blvd", "Philadelphia", "PA", 19107,
+            123456789L, 1, new BigDecimal("10.01"),
+            false,
+            1.0, 1.1, 1.2,
+            DateUtility.parseLocalDate("2012-02-29"), DateUtility.parseLocalDateTime("2012-02-29 13:00:01.0"), DateUtility.parseLocalDate("2015-01-01"), DateUtility.parseLocalTime("13:00:01.0"),
+            "{ a : 5, b : 6 }",
+            "It is a long established fact that a reader will be distracted by the readable content of a page when looking at its layout",
+            "xxx",
+            "a medium piece of text",
+            "a longer piece of text this is going on.....",
+            "this is a test".getBytes(),
+            true, "XXX")
+        .baselineValues(2,
+            "first_name_2", "last_name_2", "One Ferry Building", "San Francisco", "CA", 94111,
+            45456767L, 3, new BigDecimal("30.04"),
+            true,
+            3.0, 3.1, 3.2,
+            DateUtility.parseLocalDate("2011-10-30"), DateUtility.parseLocalDateTime("2011-10-30 11:34:21.0"), DateUtility.parseLocalDate("2015-01-01"), DateUtility.parseLocalTime("11:34:21.0"),
+            "{ z : [ 1, 2, 3 ] }",
+            "It is a long established fact that a reader will be distracted by the readable content of a page when looking at its layout",
+            "abc",
+            "a medium piece of text 2",
+            "somewhat more text",
+            "this is a test 2".getBytes(),
+            false, "YYY")
+        .baselineValues(3,
+            "first_name_3", "last_name_3", "176 Bowery", "New York", "NY", 10012,
+            123090L, -3, new BigDecimal("55.12"),
+            false,
+            5.0, 5.1, 5.55,
+            DateUtility.parseLocalDate("2015-06-01"), DateUtility.parseLocalDateTime("2015-09-22 15:46:10.0"), DateUtility.parseLocalDate("1901-01-01"), DateUtility.parseLocalTime("16:00:01.0"),
+            "{ [ a, b, c ] }",
+            "Neque porro quisquam est qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit",
+            "abc",
+            "a medium piece of text 3",
+            "somewhat more text",
+            "this is a test 3".getBytes(),
+            true, "ZZZ")
+        .baselineValues(5,
+            null, null, null, null, null, null,
+            null, null, null,
+            null,
+            null, null, null,
+            null, null, null, null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null, "XXX")
+            .go();
   }
 
   @Test
@@ -132,4 +136,25 @@ public void testPhysicalPlanSubmission() throws Exception {
     testPhysicalPlanExecutionBasedOnQuery("select * from mysql.`drill_mysql_test`.person");
   }
 
+  @Test
+  public void emptyOutput() throws Exception {
+    String query = "select * from mysql.`drill_mysql_test`.person e limit 0";
+
+    test(query);
+  }
+
+  @Test
+  public void testCaseSensitiveTableNames() throws Exception {
+    test("use mysqlCaseInsensitive.`drill_mysql_test`");
+    // two table names match the filter when case is ignored
+    assertEquals(2, testSql("show tables like 'caseSensitiveTable'"));
+
+    test("use mysql.`drill_mysql_test`");
+    // a single table matches the filter when table name case is considered
+    assertEquals(1, testSql("show tables like 'caseSensitiveTable'"));
+
+    // checks that tables with names in different case are recognized correctly
+    assertEquals(1, testSql("describe caseSensitiveTable"));
+    assertEquals(2, testSql("describe CASESENSITIVETABLE"));
+  }
 }
diff --git a/contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json b/contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json
index 4018d92478d..945ddeb53b8 100755
--- a/contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json
+++ b/contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json
@@ -4,15 +4,22 @@
           type    : "jdbc",
           driver  : "org.apache.derby.jdbc.ClientDriver",
           url     : "jdbc:derby://localhost:${derby.reserved.port}/memory:${derby.database.name};user=root;password=root",
+          caseInsensitiveTableNames: true,
           enabled : true
         },
         mysql : {
           type    : "jdbc",
-          enabled : true,
           driver  : "com.mysql.jdbc.Driver",
           url     : "jdbc:mysql://localhost:${mysql.reserved.port}/${mysql.database.name}?user=root&password=root&useJDBCCompliantTimezoneShift=true",
           enabled : true
-      }
+        },
+        mysqlCaseInsensitive : {
+          type    : "jdbc",
+          driver  : "com.mysql.jdbc.Driver",
+          url     : "jdbc:mysql://localhost:${mysql.reserved.port}/${mysql.database.name}?user=root&password=root&useJDBCCompliantTimezoneShift=true",
+          caseInsensitiveTableNames: true,
+          enabled : true
+        }
     }
 }
 
diff --git a/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql b/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql
index 6875d99594e..92ad6ff57bd 100644
--- a/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql
+++ b/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql
@@ -3,12 +3,18 @@ set global time_zone = "+00:00";
 
 use drill_mysql_test;
 
-create table x (
+create table caseSensitiveTable (
   a   BLOB
 );
 
-insert into x (a) values ('this is a test');
+insert into caseSensitiveTable (a) values ('this is a test');
 
+create table CASESENSITIVETABLE (
+  a   BLOB,
+  b   BLOB
+);
+
+insert into CASESENSITIVETABLE (a, b) values ('this is a test', 'for case sensitive table names');
 
 create table person (
   person_id       INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java
index 7497783e383..9e351f7b0db 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterRel.java
@@ -51,7 +51,7 @@ public static DrillFilterRel convert(org.apache.drill.common.logical.data.Filter
   }
 
   public static DrillFilterRel create(RelNode child, RexNode condition) {
-    return new DrillFilterRel(child.getCluster(), child.getTraitSet(), child, condition);
+    return new DrillFilterRel(child.getCluster(), child.getTraitSet().plus(DRILL_LOGICAL), child, condition);
   }
 
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRel.java
index 2de63ab29ff..51bfb465b7d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRel.java
@@ -27,7 +27,7 @@
 public interface DrillRel extends DrillRelNode {
   /** Calling convention for relational expressions that are "implemented" by
    * generating Drill logical plans. */
-  public static final Convention DRILL_LOGICAL = new Convention.Impl("LOGICAL", DrillRel.class);
+  Convention DRILL_LOGICAL = new Convention.Impl("LOGICAL", DrillRel.class);
 
   LogicalOperator implement(DrillImplementor implementor);
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java
index a0b727d3f29..feccce029ad 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java
@@ -44,6 +44,7 @@
 import static org.apache.calcite.rel.core.RelFactories.DEFAULT_SORT_FACTORY;
 import static org.apache.calcite.rel.core.RelFactories.DEFAULT_TABLE_SCAN_FACTORY;
 import static org.apache.calcite.rel.core.RelFactories.DEFAULT_VALUES_FACTORY;
+import static org.apache.drill.exec.planner.logical.DrillRel.DRILL_LOGICAL;
 
 /**
  * Contains factory implementation for creating various Drill Logical Rel nodes.
@@ -100,7 +101,7 @@ public RelNode createProject(RelNode child,
       final RelDataType rowType =
           RexUtil.createStructType(cluster.getTypeFactory(), childExprs, fieldNames, null);
 
-      return DrillProjectRel.create(cluster, child.getTraitSet(), child, childExprs, rowType);
+      return DrillProjectRel.create(cluster, child.getTraitSet().plus(DRILL_LOGICAL), child, childExprs, rowType);
     }
   }
 
@@ -125,14 +126,14 @@ public RelNode createFilter(RelNode child, RexNode condition) {
     public RelNode createJoin(RelNode left, RelNode right,
                               RexNode condition, Set<CorrelationId> variablesSet,
                               JoinRelType joinType, boolean semiJoinDone) {
-      return new DrillJoinRel(left.getCluster(), left.getTraitSet(), left, right, condition, joinType);
+      return new DrillJoinRel(left.getCluster(), left.getTraitSet().plus(DRILL_LOGICAL), left, right, condition, joinType);
     }
 
     @Override
     public RelNode createJoin(RelNode left, RelNode right,
                               RexNode condition, JoinRelType joinType,
                               Set<String> variablesStopped, boolean semiJoinDone) {
-      return new DrillJoinRel(left.getCluster(), left.getTraitSet(), left, right, condition, joinType);
+      return new DrillJoinRel(left.getCluster(), left.getTraitSet().plus(DRILL_LOGICAL), left, right, condition, joinType);
     }
   }
 
@@ -145,7 +146,7 @@ public RelNode createJoin(RelNode left, RelNode right,
     @Override
     public RelNode createAggregate(RelNode input, boolean indicator, ImmutableBitSet groupSet,
                                    com.google.common.collect.ImmutableList<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
-      return new DrillAggregateRel(input.getCluster(), input.getTraitSet(), input, indicator, groupSet, groupSets, aggCalls);
+      return new DrillAggregateRel(input.getCluster(), input.getTraitSet().plus(DRILL_LOGICAL), input, indicator, groupSet, groupSets, aggCalls);
     }
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java
index 7f7dbe9f70a..92a07c526d7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java
@@ -21,6 +21,7 @@
 import com.fasterxml.jackson.core.SerializableString;
 import com.fasterxml.jackson.core.io.CharacterEscapes;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.drill.exec.store.SchemaFactory;
 import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.sql.SqlDescribeSchema;
@@ -35,7 +36,6 @@
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.StoragePlugin;
 import org.apache.drill.exec.store.dfs.FileSystemPlugin;
-import org.apache.drill.exec.store.dfs.FileSystemSchemaFactory;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
 import org.apache.drill.exec.work.foreman.ForemanSetupException;
 
@@ -111,12 +111,12 @@ private void transformWorkspaces(List<String> names, Map configMap) {
     Object workspaces = configMap.remove("workspaces");
     if (workspaces != null) {
       Map map = (Map) workspaces;
-      String key = names.size() > 1 ? names.get(1) : FileSystemSchemaFactory.DEFAULT_WS_NAME;
+      String key = names.size() > 1 ? names.get(1) : SchemaFactory.DEFAULT_WS_NAME;
       Object workspace = map.get(key);
       if (workspace != null) {
         Map workspaceMap = (Map) map.get(key);
         configMap.putAll(workspaceMap);
-      } else if (FileSystemSchemaFactory.DEFAULT_WS_NAME.equals(key)) {
+      } else if (SchemaFactory.DEFAULT_WS_NAME.equals(key)) {
         configMap.putAll(mapper.convertValue(WorkspaceConfig.DEFAULT, Map.class));
       }
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindHardDistributionScans.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindHardDistributionScans.java
index 6af8aa4a125..7d87aa2c881 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindHardDistributionScans.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindHardDistributionScans.java
@@ -52,7 +52,15 @@ public RelNode visit(TableScan scan) {
     DrillTable unwrap;
     unwrap = scan.getTable().unwrap(DrillTable.class);
     if (unwrap == null) {
-      unwrap = scan.getTable().unwrap(DrillTranslatableTable.class).getDrillTable();
+      DrillTranslatableTable drillTranslatableTable = scan.getTable().unwrap(DrillTranslatableTable.class);
+      // For the case when the underlying Table was obtained from Calcite,
+      // it extends neither DrillTable nor DrillTranslatableTable.
+      // Therefore DistributionAffinity type cannot be determined and single mode is rejected.
+      if (drillTranslatableTable == null) {
+        contains = true; // it rejects single mode.
+        return scan;
+      }
+      unwrap = drillTranslatableTable.getDrillTable();
     }
 
     try {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java
index 4545169e7c7..4766e96312a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java
@@ -26,6 +26,8 @@
  */
 public interface SchemaFactory {
 
+  String DEFAULT_WS_NAME = "default";
+
   /**
    * Register the schemas provided by this SchemaFactory implementation under the given parent schema.
    *
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index dd1c91c9b32..5656c550a15 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -17,8 +17,6 @@
  */
 package org.apache.drill.exec.store.dfs;
 
-import static org.apache.drill.exec.store.dfs.FileSystemSchemaFactory.DEFAULT_WS_NAME;
-
 import java.io.IOException;
 import java.net.URI;
 import java.util.ArrayList;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java
index 1a97e60d367..795cbd21d62 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java
@@ -47,8 +47,6 @@
  */
 public class FileSystemSchemaFactory extends AbstractSchemaFactory {
 
-  public static final String DEFAULT_WS_NAME = "default";
-
   public static final String LOCAL_FS_SCHEME = "file";
 
   private List<WorkspaceSchemaFactory> factories;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
index e73c1b6cfb8..1c7b2ab2d9e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
@@ -22,6 +22,7 @@
 import java.util.Map;
 import java.util.Optional;
 
+import org.apache.drill.exec.store.SchemaFactory;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.logical.FormatPluginConfig;
@@ -43,11 +44,10 @@
   public static final String DFS_PLUGIN_NAME = "dfs";
 
   public static final String TMP_SCHEMA = "tmp";
-  public static final String DEFAULT_SCHEMA = "default";
   public static final String ROOT_SCHEMA = "root";
 
   public static final String DFS_TMP_SCHEMA = DFS_PLUGIN_NAME + "." + TMP_SCHEMA;
-  public static final String DFS_DEFAULT_SCHEMA = DFS_PLUGIN_NAME + "." + DEFAULT_SCHEMA;
+  public static final String DFS_DEFAULT_SCHEMA = DFS_PLUGIN_NAME + "." + SchemaFactory.DEFAULT_WS_NAME;
   public static final String DFS_ROOT_SCHEMA = DFS_PLUGIN_NAME + "." + ROOT_SCHEMA;
 
   public static final String UNIT_TEST_PROP_PREFIX = "drillJDBCUnitTests";
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
index 71aa2407aca..250f6468c8b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
@@ -17,7 +17,6 @@
  */
 package org.apache.drill.test;
 
-import static org.apache.drill.exec.util.StoragePluginTestUtils.DEFAULT_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.ROOT_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.TMP_SCHEMA;
 import static org.hamcrest.core.StringContains.containsString;
@@ -35,6 +34,7 @@
 import java.util.Properties;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.drill.exec.store.SchemaFactory;
 import org.apache.drill.test.DrillTestWrapper.TestServices;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.DrillProperties;
@@ -194,7 +194,7 @@ private static void openClient(Properties properties) throws Exception {
       StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry,
         dirTestWatcher.getRootDir(), ROOT_SCHEMA);
       StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry,
-        dirTestWatcher.getRootDir(), DEFAULT_SCHEMA);
+        dirTestWatcher.getRootDir(), SchemaFactory.DEFAULT_WS_NAME);
     }
 
     if (!properties.containsKey(DrillProperties.DRILLBIT_CONNECTION)) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
index 996898ee470..a9d2977b4b6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
@@ -33,6 +33,7 @@
 import java.util.Optional;
 import java.util.Properties;
 
+import org.apache.drill.exec.store.SchemaFactory;
 import org.apache.drill.test.DrillTestWrapper.TestServices;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
@@ -61,7 +62,6 @@
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 import org.apache.drill.shaded.guava.com.google.common.io.Resources;
 
-import static org.apache.drill.exec.util.StoragePluginTestUtils.DEFAULT_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.ROOT_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.TMP_SCHEMA;
@@ -267,7 +267,7 @@ private void configureStoragePlugins(Drillbit bit) throws Exception {
 
     StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, builder.dirTestWatcher.getDfsTestTmpDir(), TMP_SCHEMA);
     StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, builder.dirTestWatcher.getRootDir(), ROOT_SCHEMA);
-    StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, builder.dirTestWatcher.getRootDir(), DEFAULT_SCHEMA);
+    StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, builder.dirTestWatcher.getRootDir(), SchemaFactory.DEFAULT_WS_NAME);
 
     // Create the mock data plugin
 
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java
index b7e61b94f18..820044e4141 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillConnectionImpl.java
@@ -55,6 +55,7 @@
 import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.exec.server.Drillbit;
 import org.apache.drill.exec.server.RemoteServiceSet;
+import org.apache.drill.exec.store.SchemaFactory;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.jdbc.AlreadyClosedSqlException;
 import org.apache.drill.jdbc.DrillConnection;
@@ -65,7 +66,6 @@
 
 import org.apache.drill.shaded.guava.com.google.common.base.Throwables;
 
-import static org.apache.drill.exec.util.StoragePluginTestUtils.DEFAULT_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_PLUGIN_NAME;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.ROOT_SCHEMA;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.TMP_SCHEMA;
@@ -666,7 +666,7 @@ private static void makeTmpSchemaLocationsUnique(StoragePluginRegistry pluginReg
         if (dfsDefaultPath == null) {
           logger.warn(logMessage, UNIT_TEST_DFS_DEFAULT_PROP);
         } else {
-          updateSchemaLocation(DFS_PLUGIN_NAME, pluginRegistry, new File(dfsDefaultPath), DEFAULT_SCHEMA);
+          updateSchemaLocation(DFS_PLUGIN_NAME, pluginRegistry, new File(dfsDefaultPath), SchemaFactory.DEFAULT_WS_NAME);
         }
       }
     } catch(Throwable e) {


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services