Posted to commits@spark.apache.org by we...@apache.org on 2020/07/23 14:08:14 UTC
[spark] branch branch-3.0 updated: [SPARK-32251][SQL][TESTS][FOLLOWUP] improve SQL keyword test
This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 01c88be [SPARK-32251][SQL][TESTS][FOLLOWUP] improve SQL keyword test
01c88be is described below
commit 01c88bedc87c07194abfab8d9d7e03271ffd9757
Author: Wenchen Fan <we...@databricks.com>
AuthorDate: Thu Jul 23 14:02:38 2020 +0000
[SPARK-32251][SQL][TESTS][FOLLOWUP] improve SQL keyword test
What changes were proposed in this pull request?

Improve the `SQLKeywordSuite` so that:
1. it checks keywords under the default mode as well (see the extraction sketch below)
2. it checks whether there are typos in the doc (one was found and fixed in this PR)

Why are the changes needed?

Better test coverage.

Does this PR introduce any user-facing change?

No.

How was this patch tested?

N/A
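For context, the new default-mode check works by bracketing the `nonReserved` rule in SqlBase.g4 with `//--DEFAULT-NON-RESERVED-START` / `//--DEFAULT-NON-RESERVED-END` markers and collecting every keyword symbol between them. The following is a minimal standalone sketch of that extraction, not the suite's actual `parseAntlrGrammars` helper; the object name and the hard-coded grammar path are illustrative assumptions, and it omits the expansion of symbols into alternative literals that the real suite performs via `symbolsToExpandIntoDifferentLiterals`.

import scala.io.Source

// Minimal sketch (not the actual SQLKeywordSuite helper): collect the keyword
// symbols listed between the //--DEFAULT-NON-RESERVED-START/END markers in
// SqlBase.g4, using the same regex shape as the new test code in the diff below.
object DefaultNonReservedSketch {
  def main(args: Array[String]): Unit = {
    // Assumed relative path to the grammar file in a Spark checkout.
    val grammarPath = args.headOption.getOrElse(
      "sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4")
    // Matches keyword lines such as "  | AFTER" or "  : ADD".
    val kwDef = """\s*[\|:]\s*([A-Z_]+)\s*""".r
    val source = Source.fromFile(grammarPath)
    try {
      val lines = source.getLines().toSeq
      val between = lines
        .dropWhile(!_.contains("//--DEFAULT-NON-RESERVED-START"))
        .takeWhile(!_.contains("//--DEFAULT-NON-RESERVED-END"))
      val keywords = between.collect { case kwDef(symbol) => symbol }.toSet
      println(s"Found ${keywords.size} default-mode non-reserved keywords")
    } finally {
      source.close()
    }
  }
}

The test then compares this set against the default-mode column of the keyword table in docs/sql-ref-ansi-compliance.md and fails with the symmetric difference of the two sets, which is how the MINUS typo fixed in this commit was caught.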
Closes #29200 from cloud-fan/test.
Authored-by: Wenchen Fan <we...@databricks.com>
Signed-off-by: Wenchen Fan <we...@databricks.com>
(cherry picked from commit aa54dcf193a2149182da779191cf12f087305726)
Signed-off-by: Wenchen Fan <we...@databricks.com>
---
docs/sql-ref-ansi-compliance.md | 2 +-
.../apache/spark/sql/catalyst/parser/SqlBase.g4 | 2 +
.../spark/sql/catalyst/SQLKeywordSuite.scala | 46 +++++++++++++++++++---
3 files changed, 43 insertions(+), 7 deletions(-)
diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index 6488ad9..1936161 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -264,7 +264,7 @@ Below is a list of all the keywords in Spark SQL.
|MAP|non-reserved|non-reserved|non-reserved|
|MATCHED|non-reserved|non-reserved|non-reserved|
|MERGE|non-reserved|non-reserved|non-reserved|
-|MINUS|not-reserved|strict-non-reserved|non-reserved|
+|MINUS|non-reserved|strict-non-reserved|non-reserved|
|MINUTE|reserved|non-reserved|reserved|
|MONTH|reserved|non-reserved|reserved|
|MSCK|non-reserved|non-reserved|non-reserved|
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index e02bcda..df6ff9f 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -1200,6 +1200,7 @@ strictNonReserved
;
nonReserved
+//--DEFAULT-NON-RESERVED-START
: ADD
| AFTER
| ALL
@@ -1438,6 +1439,7 @@ nonReserved
| WINDOW
| WITH
| YEAR
+//--DEFAULT-NON-RESERVED-END
;
// NOTE: If you add a new token in the list below, you should update the list of keywords
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala
index 3d41d02..082b011 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala
@@ -38,7 +38,7 @@ trait SQLKeywordUtils extends SQLHelper {
}
// each element is an array of 4 string: the keyword name, reserve or not in Spark ANSI mode,
- // Spark non-ANSI mode, and the SQL standard.
+ // Spark default mode, and the SQL standard.
val keywordsInDoc: Array[Array[String]] = {
val docPath = {
java.nio.file.Paths.get(sparkHome, "docs", "sql-ref-ansi-compliance.md").toFile
@@ -135,6 +135,19 @@ trait SQLKeywordUtils extends SQLHelper {
}
val reservedKeywordsInAnsiMode = allCandidateKeywords -- nonReservedKeywordsInAnsiMode
+
+ val nonReservedKeywordsInDefaultMode: Set[String] = {
+ val kwDef = """\s*[\|:]\s*([A-Z_]+)\s*""".r
+ parseAntlrGrammars("//--DEFAULT-NON-RESERVED-START", "//--DEFAULT-NON-RESERVED-END") {
+ // Parses a pattern, e.g., ` | AFTER`
+ case kwDef(symbol) =>
+ if (symbolsToExpandIntoDifferentLiterals.contains(symbol)) {
+ symbolsToExpandIntoDifferentLiterals(symbol)
+ } else {
+ symbol :: Nil
+ }
+ }
+ }
}
class SQLKeywordSuite extends SparkFunSuite with SQLKeywordUtils {
@@ -146,11 +159,32 @@ class SQLKeywordSuite extends SparkFunSuite with SQLKeywordUtils {
}
}
- test("Spark keywords are documented correctly") {
- val reservedKeywordsInDoc = keywordsInDoc.filter(_.apply(1) == "reserved").map(_.head).toSet
- if (reservedKeywordsInAnsiMode != reservedKeywordsInDoc) {
- val misImplemented = (reservedKeywordsInDoc -- reservedKeywordsInAnsiMode).toSeq.sorted
- fail("Some keywords are documented as reserved but actually not: " +
+ test("Spark keywords are documented correctly under ANSI mode") {
+ // keywords under ANSI mode should either be reserved or non-reserved.
+ keywordsInDoc.map(_.apply(1)).foreach { desc =>
+ assert(desc == "reserved" || desc == "non-reserved")
+ }
+
+ val nonReservedInDoc = keywordsInDoc.filter(_.apply(1) == "non-reserved").map(_.head).toSet
+ if (nonReservedKeywordsInAnsiMode != nonReservedInDoc) {
+ val misImplemented = ((nonReservedInDoc -- nonReservedKeywordsInAnsiMode) ++
+ (nonReservedKeywordsInAnsiMode -- nonReservedInDoc)).toSeq.sorted
+ fail("Some keywords are documented and implemented inconsistently: " +
+ misImplemented.mkString(", "))
+ }
+ }
+
+ test("Spark keywords are documented correctly under default mode") {
+ // keywords under default mode should either be strict-non-reserved or non-reserved.
+ keywordsInDoc.map(_.apply(2)).foreach { desc =>
+ assert(desc == "strict-non-reserved" || desc == "non-reserved")
+ }
+
+ val nonReservedInDoc = keywordsInDoc.filter(_.apply(2) == "non-reserved").map(_.head).toSet
+ if (nonReservedKeywordsInDefaultMode != nonReservedInDoc) {
+ val misImplemented = ((nonReservedInDoc -- nonReservedKeywordsInDefaultMode) ++
+ (nonReservedKeywordsInDefaultMode -- nonReservedInDoc)).toSeq.sorted
+ fail("Some keywords are documented and implemented inconsistently: " +
misImplemented.mkString(", "))
}
}