Posted to dev@drill.apache.org by GitBox <gi...@apache.org> on 2018/07/18 13:51:53 UTC

[GitHub] arina-ielchiieva closed pull request #1385: DRILL-6612: Query fails with AssertionError when joining persistent a…

arina-ielchiieva closed pull request #1385: DRILL-6612: Query fails with AssertionError when joining persistent a…
URL: https://github.com/apache/drill/pull/1385

This pull request was merged from a forked repository. Because GitHub hides
the original diff of a foreign (forked) pull request once it is merged, the
diff is reproduced below for the sake of provenance:

diff --git a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
index cde46f2c28b..e6b8f49499d 100644
--- a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
@@ -26,6 +26,7 @@
 import org.apache.calcite.schema.impl.AbstractSchema;
 import org.apache.calcite.util.BuiltInMethod;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.exec.planner.sql.SchemaUtilites;
 import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.StoragePlugin;
 import org.apache.drill.exec.store.StoragePluginRegistry;
@@ -83,9 +84,9 @@ public void loadSchemaFactory(String schemaName, boolean caseSensitive) {
       }
 
       // Could not find the plugin of schemaName. The schemaName could be `dfs.tmp`, a 2nd level schema under 'dfs'
-      String[] paths = schemaName.split("\\.");
-      if (paths.length == 2) {
-        plugin = getSchemaFactories().getPlugin(paths[0]);
+      List<String> paths = SchemaUtilites.getSchemaPathAsList(schemaName);
+      if (paths.size() == 2) {
+        plugin = getSchemaFactories().getPlugin(paths.get(0));
         if (plugin == null) {
           return;
         }
@@ -95,7 +96,7 @@ public void loadSchemaFactory(String schemaName, boolean caseSensitive) {
         plugin.registerSchemas(schemaConfig, thisPlus);
 
         // Load second level schemas for this storage plugin
-        final SchemaPlus firstlevelSchema = thisPlus.getSubSchema(paths[0]);
+        final SchemaPlus firstlevelSchema = thisPlus.getSubSchema(paths.get(0));
         final List<SchemaPlus> secondLevelSchemas = Lists.newArrayList();
         for (String secondLevelSchemaName : firstlevelSchema.getSubSchemaNames()) {
           secondLevelSchemas.add(firstlevelSchema.getSubSchema(secondLevelSchemaName));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java
index 7d42e57e9ad..a2623634916 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java
@@ -27,6 +27,7 @@
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory;
 
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
@@ -73,7 +74,7 @@ public static SchemaPlus findSchema(final SchemaPlus defaultSchema, final List<S
    * @return found schema path
    */
   public static SchemaPlus findSchema(final SchemaPlus defaultSchema, final String schemaPath) {
-    final List<String> schemaPathAsList = Lists.newArrayList(schemaPath.split("\\."));
+    final List<String> schemaPathAsList = getSchemaPathAsList(schemaPath);
     return findSchema(defaultSchema, schemaPathAsList);
   }
 
@@ -144,6 +145,11 @@ public static String getSchemaPath(List<String> schemaPath) {
     return SCHEMA_PATH_JOINER.join(schemaPath);
   }
 
+  /** Utility method to get the list with schema path components for given schema path string. */
+  public static List<String> getSchemaPathAsList(String schemaPath) {
+    return Arrays.asList(schemaPath.split("\\."));
+  }
+
   /** Utility method to get the schema path as list for given schema instance. */
   public static List<String> getSchemaPathAsList(SchemaPlus schema) {
     if (isRootSchema(schema)) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
index eee141e77a1..d4da23f43e7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.planner.sql;
 
 import java.math.BigDecimal;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -52,7 +53,6 @@
 import org.apache.calcite.sql.SqlCall;
 import org.apache.calcite.sql.SqlIdentifier;
 import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlOperatorTable;
 import org.apache.calcite.sql.parser.SqlParseException;
 import org.apache.calcite.sql.parser.SqlParser;
@@ -251,24 +251,24 @@ protected void validateFrom(
         SqlValidatorScope scope) {
       switch (node.getKind()) {
         case AS:
-          if (((SqlCall) node).operand(0) instanceof SqlIdentifier) {
-            SqlIdentifier tempNode = ((SqlCall) node).operand(0);
-            DrillCalciteCatalogReader catalogReader = (SqlConverter.DrillCalciteCatalogReader) getCatalogReader();
-
-            // Check the schema and throw a valid SchemaNotFound exception instead of TableNotFound exception.
-            if (catalogReader.getTable(Lists.newArrayList(tempNode.names)) == null) {
-              catalogReader.isValidSchema(tempNode.names);
-            }
-            changeNamesIfTableIsTemporary(tempNode);
+          SqlNode sqlNode = ((SqlCall) node).operand(0);
+          switch (sqlNode.getKind()) {
+            case IDENTIFIER:
+              SqlIdentifier tempNode = (SqlIdentifier) sqlNode;
+              DrillCalciteCatalogReader catalogReader = (SqlConverter.DrillCalciteCatalogReader) getCatalogReader();
+
+              changeNamesIfTableIsTemporary(tempNode);
+
+              // Check the schema and throw a valid SchemaNotFound exception instead of TableNotFound exception.
+              if (catalogReader.getTable(tempNode.names) == null) {
+                catalogReader.isValidSchema(tempNode.names);
+              }
+              break;
+            case UNNEST:
+              if (((SqlCall) node).operandCount() < 3) {
+                throw RESOURCE.validationError("Alias table and column name are required for UNNEST").ex();
+              }
           }
-          else  if (((SqlCall) node).operand(0).getKind() == SqlKind.UNNEST) {
-            if (((SqlCall) node).operandCount() < 3) {
-              throw RESOURCE.validationError("Alias table and column name are required for UNNEST").ex();
-            }
-          }
-          break;
-        default:
-          break;
       }
       super.validateFrom(node, targetRowType, scope);
     }
@@ -626,7 +626,9 @@ void disallowTemporaryTables() {
       if (mightBeTemporaryTable(names, session.getDefaultSchemaPath(), drillConfig)) {
         String temporaryTableName = session.resolveTemporaryTableName(names.get(names.size() - 1));
         if (temporaryTableName != null) {
-          return Lists.newArrayList(temporarySchema, temporaryTableName);
+          List<String> temporaryNames = new ArrayList<>(SchemaUtilites.getSchemaPathAsList(temporarySchema));
+          temporaryNames.add(temporaryTableName);
+          return temporaryNames;
         }
       }
       return null;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java
index 4edeadce3b4..40d5ef09d8b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserSession.java
@@ -26,7 +26,6 @@
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
 
 import org.apache.calcite.schema.Schema;
 import org.apache.calcite.schema.SchemaPlus;
@@ -196,7 +195,7 @@ public int getQueryCount() {
    */
   public void setDefaultSchemaPath(String newDefaultSchemaPath, SchemaPlus currentDefaultSchema)
       throws ValidationException {
-    final List<String> newDefaultPathAsList = Lists.newArrayList(newDefaultSchemaPath.split("\\."));
+    final List<String> newDefaultPathAsList = SchemaUtilites.getSchemaPathAsList(newDefaultSchemaPath);
     SchemaPlus newDefault;
 
     // First try to find the given schema relative to the current default schema.
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
index c4ababf1c43..498f92ea8bc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
@@ -427,6 +427,25 @@ public void testDropTemporaryTableAsViewWithException() throws Exception {
     test("drop view %s.%s", DFS_TMP_SCHEMA, temporaryTableName);
   }
 
+  @Test
+  public void testJoinTemporaryWithPersistentTable() throws Exception {
+    String temporaryTableName = "temp_tab";
+    String persistentTableName = "pers_tab";
+    String query = String.format("select * from `%s` a join `%s` b on a.c1 = b.c2",
+        persistentTableName, temporaryTableName);
+
+    test("use %s", temp2_schema);
+    test("create TEMPORARY table %s as select '12312' as c2", temporaryTableName);
+    test("create table %s as select '12312' as c1", persistentTableName);
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .baselineColumns("c1", "c2")
+        .baselineValues("12312", "12312")
+        .go();
+  }
+
   private void expectUserRemoteExceptionWithMessage(String message) {
     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString(message));
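
For orientation, a minimal standalone sketch of the path handling this diff introduces (the class name SchemaPathSketch and the main driver are illustrative, not Drill code): the schema part of a fully qualified temporary table name is now split into its dot-separated components before the table name is appended, rather than being kept as one compound element.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SchemaPathSketch {

      // Mirrors the new SchemaUtilites.getSchemaPathAsList(String) above:
      // split a dotted schema path such as "dfs.tmp" into its components.
      static List<String> getSchemaPathAsList(String schemaPath) {
        return Arrays.asList(schemaPath.split("\\."));
      }

      // Mirrors the change in SqlConverter's temporary-table resolution:
      // build the fully qualified name from the split schema components plus
      // the resolved table name, so each list element is a single path segment.
      static List<String> qualifyTemporaryTable(String temporarySchema, String temporaryTableName) {
        List<String> names = new ArrayList<>(getSchemaPathAsList(temporarySchema));
        names.add(temporaryTableName);
        return names;
      }

      public static void main(String[] args) {
        // Before the change the two-element list [temporarySchema, temporaryTableName]
        // was returned, so a compound schema stayed as "dfs.tmp"; with the split
        // the result is ["dfs", "tmp", "temp_tab"].
        System.out.println(qualifyTemporaryTable("dfs.tmp", "temp_tab"));
      }
    }

The new TestCTTAS case above (testJoinTemporaryWithPersistentTable) exercises this path by joining the persistent table pers_tab with the temporary table temp_tab.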


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services