You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2022/08/18 18:31:25 UTC
[spark] branch master updated: [SPARK-40136][SQL] Fix the fragment of SQL query contexts
This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new e00c1209021 [SPARK-40136][SQL] Fix the fragment of SQL query contexts
e00c1209021 is described below
commit e00c12090217f1fa378cd99c1cc596033a065326
Author: Max Gekk <ma...@gmail.com>
AuthorDate: Thu Aug 18 21:31:07 2022 +0300
[SPARK-40136][SQL] Fix the fragment of SQL query contexts
### What changes were proposed in this pull request?
In the PR, I propose to change the implementation of `fragment` in `SQLQueryContext` so that it extracts the fragment correctly. Before the changes, `fragment` missed the last char because `originStopIndex.get` points to the last char of the fragment (an inclusive index), but `substring()` returns a sub-string only up to `endIndex - 1` (its end index is exclusive).
### Why are the changes needed?
It fixes a bug.
### Does this PR introduce _any_ user-facing change?
Yes.
### How was this patch tested?
By running the new and affected test suites:
```
$ build/sbt "test:testOnly *SQLQueryContextSuite"
$ build/sbt "test:testOnly *TreeNodeSuite"
$ build/sbt "test:testOnly *QueryExecutionAnsiErrorsSuite"
```
Closes #37566 from MaxGekk/fix-fragment.
Authored-by: Max Gekk <ma...@gmail.com>
Signed-off-by: Max Gekk <ma...@gmail.com>
---
.../spark/sql/catalyst/trees/SQLQueryContext.scala | 2 +-
.../sql/catalyst/trees/SQLQueryContextSuite.scala | 49 ++++++++++++++++++++++
.../spark/sql/catalyst/trees/TreeNodeSuite.scala | 2 +-
.../sql/errors/QueryExecutionAnsiErrorsSuite.scala | 12 +++---
4 files changed, 57 insertions(+), 8 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
index a8806dbad4d..2f6b0f885a0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
@@ -120,7 +120,7 @@ case class SQLQueryContext(
if (!isValid) {
""
} else {
- sqlText.get.substring(originStartIndex.get, originStopIndex.get)
+ sqlText.get.substring(originStartIndex.get, originStopIndex.get + 1)
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContextSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContextSuite.scala
new file mode 100644
index 00000000000..61fe90093d2
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContextSuite.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.trees
+
+import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.catalyst.plans.SQLHelper
+
+class SQLQueryContextSuite extends SparkFunSuite with SQLHelper {
+
+ test("SPARK-40136: the length of the fragment in query context") {
+ def getFragment(sqlText: String, start: Int, stop: Int): String = {
+ val context = SQLQueryContext(
+ line = None,
+ startPosition = None,
+ originStartIndex = Some(start),
+ originStopIndex = Some(stop),
+ sqlText = Some(sqlText),
+ originObjectType = None,
+ originObjectName = None)
+ context.fragment
+ }
+ Seq(
+ ("select 1 / 0", 7, 11) -> "1 / 0",
+ ("select 1 / 0", 7, 7) -> "1",
+ ("", 7, 7) -> "",
+ // Empty fragment for invalid indexes
+ ("select 1 / 0", -1, 11) -> "",
+ ("select 1 / 0", 7, 12) -> "",
+ ("select 1 / 0", 11, 7) -> ""
+ ).foreach { case ((sqlText, start, stop), expectedFragment) =>
+ assert(getFragment(sqlText, start, stop) === expectedFragment)
+ }
+ }
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
index 442bd01aa5b..bef88a7c0a3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
@@ -888,7 +888,7 @@ class TreeNodeSuite extends SparkFunSuite with SQLHelper {
val expectedFragment =
"""cast('a'
|as /* comment */
- |int)""".stripMargin
+ |int),""".stripMargin
assert(origin.context.summary == expectedSummary)
assert(origin.context.startIndex == origin.startIndex.get)
assert(origin.context.stopIndex == origin.stopIndex.get)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 1ebca5f5f6a..837880a1d6d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -49,7 +49,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
errorClass = "DIVIDE_BY_ZERO",
sqlState = "22012",
parameters = Map("config" -> ansiConf),
- context = ExpectedContext(fragment = "6/", start = 7, stop = 9))
+ context = ExpectedContext(fragment = "6/0", start = 7, stop = 9))
}
test("INTERVAL_DIVIDED_BY_ZERO: interval divided by zero") {
@@ -60,7 +60,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
errorClass = "INTERVAL_DIVIDED_BY_ZERO",
sqlState = "22012",
parameters = Map.empty[String, String],
- context = ExpectedContext(fragment = "interval 1 day / ", start = 7, stop = 24))
+ context = ExpectedContext(fragment = "interval 1 day / 0", start = 7, stop = 24))
}
test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {
@@ -86,7 +86,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
"scale" -> "1",
"config" -> ansiConf),
context = ExpectedContext(
- fragment = "CAST('66666666666666.666' AS DECIMAL(8, 1)",
+ fragment = "CAST('66666666666666.666' AS DECIMAL(8, 1))",
start = 7,
stop = 49))
}
@@ -98,7 +98,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
},
errorClass = "INVALID_ARRAY_INDEX",
parameters = Map("indexValue" -> "8", "arraySize" -> "5", "ansiConfig" -> ansiConf),
- context = ExpectedContext(fragment = "array(1, 2, 3, 4, 5)[8", start = 7, stop = 29))
+ context = ExpectedContext(fragment = "array(1, 2, 3, 4, 5)[8]", start = 7, stop = 29))
}
test("INVALID_ARRAY_INDEX_IN_ELEMENT_AT: element_at from array") {
@@ -109,7 +109,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
errorClass = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
parameters = Map("indexValue" -> "8", "arraySize" -> "5", "ansiConfig" -> ansiConf),
context = ExpectedContext(
- fragment = "element_at(array(1, 2, 3, 4, 5), 8",
+ fragment = "element_at(array(1, 2, 3, 4, 5), 8)",
start = 7,
stop = 41))
}
@@ -126,7 +126,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
"targetType" -> "\"DOUBLE\"",
"ansiConfig" -> ansiConf),
context = ExpectedContext(
- fragment = "CAST('111111111111xe23' AS DOUBLE",
+ fragment = "CAST('111111111111xe23' AS DOUBLE)",
start = 7,
stop = 40))
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org