Posted to commits@spark.apache.org by hv...@apache.org on 2016/11/30 11:59:27 UTC

spark git commit: [SPARK-17932][SQL] Support SHOW TABLES EXTENDED LIKE 'identifier_with_wildcards' statement

Repository: spark
Updated Branches:
  refs/heads/master 2eb093dec -> c24076dcf


[SPARK-17932][SQL] Support SHOW TABLES EXTENDED LIKE 'identifier_with_wildcards' statement

## What changes were proposed in this pull request?

Spark 2.0 does not yet implement `SHOW TABLE EXTENDED`. This PR implements the statement.
Goals:
1. Support `SHOW TABLES EXTENDED LIKE 'identifier_with_wildcards'` (a usage sketch follows this list);
2. Explicitly report an "operation not allowed" error for the `SHOW TABLES [EXTENDED] ... PARTITION` statement;
3. Improve the test cases for the `SHOW TABLES` statement.
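
For a concrete feel of the new statement, here is a minimal sketch. It is not part of this commit; the `show_demo` view name and the local session setup are assumptions:

```scala
// Hypothetical sketch of the new statement; names here are illustrative only.
import org.apache.spark.sql.SparkSession

object ShowTablesExtendedSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("show-tables-extended-sketch")
      .master("local[*]")
      .getOrCreate()

    // Register a temporary view so the pattern below matches something.
    spark.range(3).createOrReplaceTempView("show_demo")

    // EXTENDED appends a fourth column, `information`, carrying table metadata.
    spark.sql("SHOW TABLES EXTENDED LIKE 'show_demo'").show(truncate = false)

    spark.stop()
  }
}
```

The pattern supports `*` for "any characters" and `|` for alternation, as exercised by the new `show-tables.sql` tests below.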

## How was this patch tested?
1. Add new test cases in the file `show-tables.sql`.
2. Modify the tests for `SHOW TABLES` in `DDLSuite`.

Author: jiangxingbo <ji...@gmail.com>

Closes #15958 from jiangxb1987/show-table-extended.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c24076dc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c24076dc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c24076dc

Branch: refs/heads/master
Commit: c24076dcf867f8d7bb328055ca817bc09ad0c1d1
Parents: 2eb093d
Author: jiangxingbo <ji...@gmail.com>
Authored: Wed Nov 30 03:59:25 2016 -0800
Committer: Herman van Hovell <hv...@databricks.com>
Committed: Wed Nov 30 03:59:25 2016 -0800

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/parser/SqlBase.g4 |   4 +-
 .../spark/sql/execution/SparkSqlParser.scala    |  14 +-
 .../spark/sql/execution/command/tables.scala    |  24 ++-
 .../resources/sql-tests/inputs/show-tables.sql  |  31 +++
 .../sql-tests/results/show-tables.sql.out       | 187 +++++++++++++++++++
 .../spark/sql/execution/command/DDLSuite.scala  |  22 +--
 6 files changed, 257 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c24076dc/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index df85c70..075c73d 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -120,8 +120,8 @@ statement
         (USING resource (',' resource)*)?                              #createFunction
     | DROP TEMPORARY? FUNCTION (IF EXISTS)? qualifiedName              #dropFunction
     | EXPLAIN (LOGICAL | FORMATTED | EXTENDED | CODEGEN)? statement    #explain
-    | SHOW TABLES ((FROM | IN) db=identifier)?
-        (LIKE? pattern=STRING)?                                        #showTables
+    | SHOW TABLES EXTENDED? ((FROM | IN) db=identifier)?
+        (LIKE? pattern=STRING)? partitionSpec?                         #showTables
     | SHOW DATABASES (LIKE pattern=STRING)?                            #showDatabases
     | SHOW TBLPROPERTIES table=tableIdentifier
         ('(' key=tablePropertyKey ')')?                                #showTblProperties

http://git-wip-us.apache.org/repos/asf/spark/blob/c24076dc/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 5f89a22..ffd6b01 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -126,13 +126,23 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    * Create a [[ShowTablesCommand]] logical plan.
    * Example SQL :
    * {{{
-   *   SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+   *   SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']
+   *   [PARTITION(partition_spec)];
    * }}}
    */
   override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) {
+    if (ctx.partitionSpec != null) {
+      operationNotAllowed("SHOW TABLES [EXTENDED] ... PARTITION", ctx)
+    }
+    if (ctx.EXTENDED != null && ctx.pattern == null) {
+      throw new AnalysisException(
+        s"SHOW TABLES EXTENDED must have identifier_with_wildcards specified.")
+    }
+
     ShowTablesCommand(
       Option(ctx.db).map(_.getText),
-      Option(ctx.pattern).map(string))
+      Option(ctx.pattern).map(string),
+      ctx.EXTENDED != null)
   }
 
   /**
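
To illustrate the two guards added above from the user-facing API, a rough sketch follows. It assumes only a local `SparkSession`; the exact error types and messages appear in the generated `show-tables.sql.out` below:

```scala
// Rough sketch of the new guard behavior; session setup is an assumption.
import scala.util.{Failure, Try}

import org.apache.spark.sql.SparkSession

object ShowTablesGuardsSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("show-tables-guards-sketch")
      .master("local[*]")
      .getOrCreate()

    // Guard 2: EXTENDED without a pattern is rejected.
    Try(spark.sql("SHOW TABLES EXTENDED")) match {
      case Failure(e) => println(s"rejected: ${e.getMessage}")
      case _          => println("unexpectedly accepted")
    }

    // Guard 1: any PARTITION spec is rejected as "Operation not allowed".
    Try(spark.sql("SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')")) match {
      case Failure(e) => println(s"rejected: ${e.getMessage}")
      case _          => println("unexpectedly accepted")
    }

    spark.stop()
  }
}
```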

http://git-wip-us.apache.org/repos/asf/spark/blob/c24076dc/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 57d66f1..dc0720d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -590,18 +590,25 @@ case class DescribeTableCommand(
  * If a databaseName is not given, the current database will be used.
  * The syntax of using this command in SQL is:
  * {{{
- *   SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ *   SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
  * }}}
  */
 case class ShowTablesCommand(
     databaseName: Option[String],
-    tableIdentifierPattern: Option[String]) extends RunnableCommand {
+    tableIdentifierPattern: Option[String],
+    isExtended: Boolean = false) extends RunnableCommand {
 
-  // The result of SHOW TABLES has three columns: database, tableName and isTemporary.
+  // The result of SHOW TABLES has three basic columns: database, tableName and isTemporary.
+  // If `isExtended` is true, append column `information` to the output columns.
   override val output: Seq[Attribute] = {
+    val tableExtendedInfo = if (isExtended) {
+      AttributeReference("information", StringType, nullable = false)() :: Nil
+    } else {
+      Nil
+    }
     AttributeReference("database", StringType, nullable = false)() ::
       AttributeReference("tableName", StringType, nullable = false)() ::
-      AttributeReference("isTemporary", BooleanType, nullable = false)() :: Nil
+      AttributeReference("isTemporary", BooleanType, nullable = false)() :: tableExtendedInfo
   }
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
@@ -612,8 +619,15 @@ case class ShowTablesCommand(
     val tables =
       tableIdentifierPattern.map(catalog.listTables(db, _)).getOrElse(catalog.listTables(db))
     tables.map { tableIdent =>
+      val database = tableIdent.database.getOrElse("")
+      val tableName = tableIdent.table
       val isTemp = catalog.isTemporaryTable(tableIdent)
-      Row(tableIdent.database.getOrElse(""), tableIdent.table, isTemp)
+      if (isExtended) {
+        val information = catalog.getTempViewOrPermanentTableMetadata(tableIdent).toString
+        Row(database, tableName, isTemp, s"${information}\n")
+      } else {
+        Row(database, tableName, isTemp)
+      }
     }
   }
 }
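
A short sketch of the resulting schema difference, matching the output-column comment in `ShowTablesCommand` above (local session setup assumed, as before):

```scala
// Sketch: EXTENDED appends an `information` column to the usual three.
import org.apache.spark.sql.SparkSession

object ShowTablesSchemaSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("show-tables-schema-sketch")
      .master("local[*]")
      .getOrCreate()

    val basic    = spark.sql("SHOW TABLES").schema.fieldNames
    val extended = spark.sql("SHOW TABLES EXTENDED LIKE '*'").schema.fieldNames

    println(basic.mkString(", "))    // database, tableName, isTemporary
    println(extended.mkString(", ")) // database, tableName, isTemporary, information

    spark.stop()
  }
}
```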

http://git-wip-us.apache.org/repos/asf/spark/blob/c24076dc/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
new file mode 100644
index 0000000..a16c398
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
@@ -0,0 +1,31 @@
+-- Test data.
+CREATE DATABASE showdb;
+USE showdb;
+CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String);
+ALTER TABLE show_t1 ADD PARTITION (c='Us', d=1);
+CREATE TABLE show_t2(b String, d Int);
+CREATE TEMPORARY VIEW show_t3(e int) USING parquet;
+CREATE GLOBAL TEMP VIEW show_t4 AS SELECT 1 as col1;
+
+-- SHOW TABLES
+SHOW TABLES;
+SHOW TABLES IN showdb;
+
+-- SHOW TABLES WITH wildcard match
+SHOW TABLES 'show_t*';
+SHOW TABLES LIKE 'show_t1*|show_t2*';
+SHOW TABLES IN showdb 'show_t*';
+
+-- SHOW TABLES EXTENDED
+-- Ignore these because there exist timestamp results, e.g. `Created`.
+-- SHOW TABLES EXTENDED LIKE 'show_t*';
+SHOW TABLES EXTENDED;
+SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us');
+
+-- Clean Up
+DROP TABLE show_t1;
+DROP TABLE show_t2;
+DROP VIEW  show_t3;
+DROP VIEW  global_temp.show_t4;
+USE default;
+DROP DATABASE showdb;

http://git-wip-us.apache.org/repos/asf/spark/blob/c24076dc/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
new file mode 100644
index 0000000..a4f4112
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -0,0 +1,187 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 20
+
+
+-- !query 0
+CREATE DATABASE showdb
+-- !query 0 schema
+struct<>
+-- !query 0 output
+
+
+
+-- !query 1
+USE showdb
+-- !query 1 schema
+struct<>
+-- !query 1 output
+
+
+
+-- !query 2
+CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String)
+-- !query 2 schema
+struct<>
+-- !query 2 output
+
+
+
+-- !query 3
+ALTER TABLE show_t1 ADD PARTITION (c='Us', d=1)
+-- !query 3 schema
+struct<>
+-- !query 3 output
+
+
+
+-- !query 4
+CREATE TABLE show_t2(b String, d Int)
+-- !query 4 schema
+struct<>
+-- !query 4 output
+
+
+
+-- !query 5
+CREATE TEMPORARY VIEW show_t3(e int) USING parquet
+-- !query 5 schema
+struct<>
+-- !query 5 output
+
+
+
+-- !query 6
+CREATE GLOBAL TEMP VIEW show_t4 AS SELECT 1 as col1
+-- !query 6 schema
+struct<>
+-- !query 6 output
+
+
+
+-- !query 7
+SHOW TABLES
+-- !query 7 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 7 output
+arraydata
+mapdata
+show_t1
+show_t2
+show_t3
+testdata
+
+
+-- !query 8
+SHOW TABLES IN showdb
+-- !query 8 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 8 output
+arraydata
+mapdata
+show_t1
+show_t2
+show_t3
+testdata
+
+
+-- !query 9
+SHOW TABLES 'show_t*'
+-- !query 9 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 9 output
+show_t1
+show_t2
+show_t3
+
+
+-- !query 10
+SHOW TABLES LIKE 'show_t1*|show_t2*'
+-- !query 10 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 10 output
+show_t1
+show_t2
+
+
+-- !query 11
+SHOW TABLES IN showdb 'show_t*'
+-- !query 11 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 11 output
+show_t1
+show_t2
+show_t3
+
+
+-- !query 12
+SHOW TABLES EXTENDED
+-- !query 12 schema
+struct<>
+-- !query 12 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+SHOW TABLES EXTENDED must have identifier_with_wildcards specified.
+== SQL ==
+SHOW TABLES EXTENDED
+
+
+-- !query 13
+SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+-- !query 13 schema
+struct<>
+-- !query 13 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Operation not allowed: SHOW TABLES [EXTENDED] ... PARTITION(line 1, pos 0)
+
+== SQL ==
+SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+^^^
+
+
+-- !query 14
+DROP TABLE show_t1
+-- !query 14 schema
+struct<>
+-- !query 14 output
+
+
+
+-- !query 15
+DROP TABLE show_t2
+-- !query 15 schema
+struct<>
+-- !query 15 output
+
+
+
+-- !query 16
+DROP VIEW  show_t3
+-- !query 16 schema
+struct<>
+-- !query 16 output
+
+
+
+-- !query 17
+DROP VIEW  global_temp.show_t4
+-- !query 17 schema
+struct<>
+-- !query 17 output
+
+
+
+-- !query 18
+USE default
+-- !query 18 schema
+struct<>
+-- !query 18 output
+
+
+
+-- !query 19
+DROP DATABASE showdb
+-- !query 19 schema
+struct<>
+-- !query 19 output
+

http://git-wip-us.apache.org/repos/asf/spark/blob/c24076dc/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index a602d75..2a004ba 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -901,24 +901,14 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
           |  Table 'test1'
           |)
         """.stripMargin)
-      checkAnswer(
-        sql("SHOW TABLES IN default 'show1*'"),
-        Row("", "show1a", true) :: Nil)
-
-      checkAnswer(
-        sql("SHOW TABLES IN default 'show1*|show2*'"),
-        Row("", "show1a", true) ::
-          Row("", "show2b", true) :: Nil)
-
-      checkAnswer(
-        sql("SHOW TABLES 'show1*|show2*'"),
-        Row("", "show1a", true) ::
-          Row("", "show2b", true) :: Nil)
-
       assert(
-        sql("SHOW TABLES").count() >= 2)
+        sql("SHOW TABLES EXTENDED LIKE 'show*'").count() >= 2)
       assert(
-        sql("SHOW TABLES IN default").count() >= 2)
+        sql("SHOW TABLES EXTENDED LIKE 'show*'").schema ==
+          StructType(StructField("database", StringType, false) ::
+            StructField("tableName", StringType, false) ::
+            StructField("isTemporary", BooleanType, false) ::
+            StructField("information", StringType, false) :: Nil))
     }
   }
 

