You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by li...@apache.org on 2018/09/07 06:36:34 UTC
spark git commit: [SPARK-12321][SQL][FOLLOW-UP] Add tests for fromString
Repository: spark
Updated Branches:
refs/heads/master 6d7bc5af4 -> f96a8bf8f
[SPARK-12321][SQL][FOLLOW-UP] Add tests for fromString
## What changes were proposed in this pull request?
Add test cases for fromString
## How was this patch tested?
N/A
Closes #22345 from gatorsmile/addTest.
Authored-by: Xiao Li <ga...@gmail.com>
Signed-off-by: gatorsmile <ga...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f96a8bf8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f96a8bf8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f96a8bf8
Branch: refs/heads/master
Commit: f96a8bf8ffe9472a839ca482f64c7cdf7540c243
Parents: 6d7bc5a
Author: Xiao Li <ga...@gmail.com>
Authored: Thu Sep 6 23:36:30 2018 -0700
Committer: gatorsmile <ga...@gmail.com>
Committed: Thu Sep 6 23:36:30 2018 -0700
----------------------------------------------------------------------
.../sql/catalyst/expressions/literals.scala | 46 +++++++++++---------
.../expressions/LiteralExpressionSuite.scala | 21 +++++++++
2 files changed, 47 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/f96a8bf8/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 0efd122..2bcbb92 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -128,30 +128,36 @@ object Literal {
val dataType = DataType.parseDataType(json \ "dataType")
json \ "value" match {
case JNull => Literal.create(null, dataType)
- case JString(str) =>
- val value = dataType match {
- case BooleanType => str.toBoolean
- case ByteType => str.toByte
- case ShortType => str.toShort
- case IntegerType => str.toInt
- case LongType => str.toLong
- case FloatType => str.toFloat
- case DoubleType => str.toDouble
- case StringType => UTF8String.fromString(str)
- case DateType => java.sql.Date.valueOf(str)
- case TimestampType => java.sql.Timestamp.valueOf(str)
- case CalendarIntervalType => CalendarInterval.fromString(str)
- case t: DecimalType =>
- val d = Decimal(str)
- assert(d.changePrecision(t.precision, t.scale))
- d
- case _ => null
- }
- Literal.create(value, dataType)
+ case JString(str) => fromString(str, dataType)
case other => sys.error(s"$other is not a valid Literal json value")
}
}
+ /**
+ * Constructs a Literal from a String
+ */
+ def fromString(str: String, dataType: DataType): Literal = {
+ val value = dataType match {
+ case BooleanType => str.toBoolean
+ case ByteType => str.toByte
+ case ShortType => str.toShort
+ case IntegerType => str.toInt
+ case LongType => str.toLong
+ case FloatType => str.toFloat
+ case DoubleType => str.toDouble
+ case StringType => UTF8String.fromString(str)
+ case DateType => java.sql.Date.valueOf(str)
+ case TimestampType => java.sql.Timestamp.valueOf(str)
+ case CalendarIntervalType => CalendarInterval.fromString(str)
+ case t: DecimalType =>
+ val d = Decimal(str)
+ assert(d.changePrecision(t.precision, t.scale))
+ d
+ case _ => null
+ }
+ Literal.create(value, dataType)
+ }
+
def create(v: Any, dataType: DataType): Literal = {
Literal(CatalystTypeConverters.convertToCatalyst(v), dataType)
}
http://git-wip-us.apache.org/repos/asf/spark/blob/f96a8bf8/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index 86f80fe..3ea6bfa 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -226,4 +226,25 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(Literal('\u0000'), "\u0000")
checkEvaluation(Literal.create('\n'), "\n")
}
+
+ test("fromString converts String/DataType input correctly") {
+ checkEvaluation(Literal.fromString(false.toString, BooleanType), false)
+ checkEvaluation(Literal.fromString(null, NullType), null)
+ checkEvaluation(Literal.fromString(Int.MaxValue.toByte.toString, ByteType), Int.MaxValue.toByte)
+ checkEvaluation(Literal.fromString(Short.MaxValue.toShort.toString, ShortType), Short.MaxValue
+ .toShort)
+ checkEvaluation(Literal.fromString(Int.MaxValue.toString, IntegerType), Int.MaxValue)
+ checkEvaluation(Literal.fromString(Long.MaxValue.toString, LongType), Long.MaxValue)
+ checkEvaluation(Literal.fromString(Float.MaxValue.toString, FloatType), Float.MaxValue)
+ checkEvaluation(Literal.fromString(Double.MaxValue.toString, DoubleType), Double.MaxValue)
+ checkEvaluation(Literal.fromString("1.23456", DecimalType(10, 5)), Decimal(1.23456))
+ checkEvaluation(Literal.fromString("Databricks", StringType), "Databricks")
+ val dateString = "1970-01-01"
+ checkEvaluation(Literal.fromString(dateString, DateType), java.sql.Date.valueOf(dateString))
+ val timestampString = "0000-01-01 00:00:00"
+ checkEvaluation(Literal.fromString(timestampString, TimestampType),
+ java.sql.Timestamp.valueOf(timestampString))
+ val calInterval = new CalendarInterval(1, 1)
+ checkEvaluation(Literal.fromString(calInterval.toString, CalendarIntervalType), calInterval)
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org