You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2019/01/21 18:24:28 UTC
[spark] branch master updated: [SPARK-26652][SQL] Remove fromJSON and fromString from Literal
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 4c1cd80 [SPARK-26652][SQL] Remove fromJSON and fromString from Literal
4c1cd80 is described below
commit 4c1cd809f812fbd208bf09c5f4d92641ecd17605
Author: Maxim Gekk <ma...@databricks.com>
AuthorDate: Tue Jan 22 02:24:12 2019 +0800
[SPARK-26652][SQL] Remove fromJSON and fromString from Literal
## What changes were proposed in this pull request?
The `fromString` and `fromJSON` methods of the `Literal` object are removed because they are not used.
Closes #23596
Closes #23603 from MaxGekk/remove-literal-fromstring.
Authored-by: Maxim Gekk <ma...@databricks.com>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../spark/sql/catalyst/expressions/literals.scala | 34 ----------------------
.../expressions/LiteralExpressionSuite.scala | 21 -------------
2 files changed, 55 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 48beffa..d7ee22f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -126,40 +126,6 @@ object Literal {
def fromObject(obj: Any, objType: DataType): Literal = new Literal(obj, objType)
def fromObject(obj: Any): Literal = new Literal(obj, ObjectType(obj.getClass))
- def fromJSON(json: JValue): Literal = {
- val dataType = DataType.parseDataType(json \ "dataType")
- json \ "value" match {
- case JNull => Literal.create(null, dataType)
- case JString(str) => fromString(str, dataType)
- case other => sys.error(s"$other is not a valid Literal json value")
- }
- }
-
- /**
- * Constructs a Literal from a String
- */
- def fromString(str: String, dataType: DataType): Literal = {
- val value = dataType match {
- case BooleanType => str.toBoolean
- case ByteType => str.toByte
- case ShortType => str.toShort
- case IntegerType => str.toInt
- case LongType => str.toLong
- case FloatType => str.toFloat
- case DoubleType => str.toDouble
- case StringType => UTF8String.fromString(str)
- case DateType => java.sql.Date.valueOf(str)
- case TimestampType => java.sql.Timestamp.valueOf(str)
- case CalendarIntervalType => CalendarInterval.fromString(str)
- case t: DecimalType =>
- val d = Decimal(str)
- assert(d.changePrecision(t.precision, t.scale))
- d
- case _ => null
- }
- Literal.create(value, dataType)
- }
-
def create(v: Any, dataType: DataType): Literal = {
Literal(CatalystTypeConverters.convertToCatalyst(v), dataType)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index 133aaa4..995d7b4 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -228,25 +228,4 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(Literal('\u0000'), "\u0000")
checkEvaluation(Literal.create('\n'), "\n")
}
-
- test("fromString converts String/DataType input correctly") {
- checkEvaluation(Literal.fromString(false.toString, BooleanType), false)
- checkEvaluation(Literal.fromString(null, NullType), null)
- checkEvaluation(Literal.fromString(Int.MaxValue.toByte.toString, ByteType), Int.MaxValue.toByte)
- checkEvaluation(Literal.fromString(Short.MaxValue.toShort.toString, ShortType), Short.MaxValue
- .toShort)
- checkEvaluation(Literal.fromString(Int.MaxValue.toString, IntegerType), Int.MaxValue)
- checkEvaluation(Literal.fromString(Long.MaxValue.toString, LongType), Long.MaxValue)
- checkEvaluation(Literal.fromString(Float.MaxValue.toString, FloatType), Float.MaxValue)
- checkEvaluation(Literal.fromString(Double.MaxValue.toString, DoubleType), Double.MaxValue)
- checkEvaluation(Literal.fromString("1.23456", DecimalType(10, 5)), Decimal(1.23456))
- checkEvaluation(Literal.fromString("Databricks", StringType), "Databricks")
- val dateString = "1970-01-01"
- checkEvaluation(Literal.fromString(dateString, DateType), java.sql.Date.valueOf(dateString))
- val timestampString = "0000-01-01 00:00:00"
- checkEvaluation(Literal.fromString(timestampString, TimestampType),
- java.sql.Timestamp.valueOf(timestampString))
- val calInterval = new CalendarInterval(1, 1)
- checkEvaluation(Literal.fromString(calInterval.toString, CalendarIntervalType), calInterval)
- }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org