You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/06/05 21:00:39 UTC
git commit: [SPARK-2036] [SQL] CaseConversionExpression should check if the evaluated value is null.
Repository: spark
Updated Branches:
refs/heads/master 89cdbb087 -> e4c11eef2
[SPARK-2036] [SQL] CaseConversionExpression should check if the evaluated value is null.
`CaseConversionExpression` should check if the evaluated value is `null`.
Author: Takuya UESHIN <ue...@happy-camper.st>
Closes #982 from ueshin/issues/SPARK-2036 and squashes the following commits:
61e1c54 [Takuya UESHIN] Add check if the evaluated value is null.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e4c11eef
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e4c11eef
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e4c11eef
Branch: refs/heads/master
Commit: e4c11eef2f64df0b6a432f40b669486d91ca6352
Parents: 89cdbb0
Author: Takuya UESHIN <ue...@happy-camper.st>
Authored: Thu Jun 5 12:00:31 2014 -0700
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Thu Jun 5 12:00:31 2014 -0700
----------------------------------------------------------------------
.../sql/catalyst/expressions/stringOperations.scala | 8 ++++++--
.../scala/org/apache/spark/sql/SQLQuerySuite.scala | 14 ++++++++++++++
.../test/scala/org/apache/spark/sql/TestData.scala | 8 ++++++++
3 files changed, 28 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/e4c11eef/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
index dcded07..4203034 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
@@ -81,8 +81,12 @@ trait CaseConversionExpression {
def dataType: DataType = StringType
override def eval(input: Row): Any = {
- val converted = child.eval(input)
- convert(converted.toString)
+ val evaluated = child.eval(input)
+ if (evaluated == null) {
+ null
+ } else {
+ convert(evaluated.toString)
+ }
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/e4c11eef/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 95860e6..e2ad391 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -322,6 +322,13 @@ class SQLQuerySuite extends QueryTest {
(2, "B"),
(3, "C"),
(4, "D")))
+
+ checkAnswer(
+ sql("SELECT n, UPPER(s) FROM nullStrings"),
+ Seq(
+ (1, "ABC"),
+ (2, "ABC"),
+ (3, null)))
}
test("system function lower()") {
@@ -334,6 +341,13 @@ class SQLQuerySuite extends QueryTest {
(4, "d"),
(5, "e"),
(6, "f")))
+
+ checkAnswer(
+ sql("SELECT n, LOWER(s) FROM nullStrings"),
+ Seq(
+ (1, "abc"),
+ (2, "abc"),
+ (3, null)))
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/e4c11eef/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index 944f520..876bd16 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -106,4 +106,12 @@ object TestData {
NullInts(null) :: Nil
)
nullInts.registerAsTable("nullInts")
+
+ case class NullStrings(n: Int, s: String)
+ val nullStrings =
+ TestSQLContext.sparkContext.parallelize(
+ NullStrings(1, "abc") ::
+ NullStrings(2, "ABC") ::
+ NullStrings(3, null) :: Nil)
+ nullStrings.registerAsTable("nullStrings")
}