Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2020/11/30 04:19:50 UTC

[GitHub] [spark] leanken commented on a change in pull request #30516: [SPARK-33572][SQL] Datetime building should fail if the year, month, ..., second combination is invalid

leanken commented on a change in pull request #30516:
URL: https://github.com/apache/spark/pull/30516#discussion_r532325226



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -1797,23 +1797,26 @@ case class MakeDate(year: Expression, month: Expression, day: Expression)
   override def dataType: DataType = DateType
   override def nullable: Boolean = true
 
+  val ansiEnabled: Boolean = SQLConf.get.ansiEnabled

Review comment:
       Nit: how about renaming this to failOnError, to align with the earlier PR, SPARK-33498?
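
       A rough sketch of how the rename could thread through MakeDate, keeping everything else
       as in the current diff (failOnError is the suggested name, not what the PR uses yet):

           // Follows the naming used in SPARK-33498: failOnError instead of ansiEnabled.
           val failOnError: Boolean = SQLConf.get.ansiEnabled

           override def nullSafeEval(year: Any, month: Any, day: Any): Any = {
             try {
               val ld = LocalDate.of(year.asInstanceOf[Int], month.asInstanceOf[Int], day.asInstanceOf[Int])
               localDateToDays(ld)
             } catch {
               // Return null only when not failing on error; otherwise let the DateTimeException propagate.
               case _: java.time.DateTimeException if !failOnError => null
             }
           }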

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -1897,6 +1900,8 @@ case class MakeTimestamp(
   override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
     copy(timeZoneId = Option(timeZoneId))
 
+  val ansiEnabled: Boolean = SQLConf.get.ansiEnabled

Review comment:
       ditto.

##########
File path: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
##########
@@ -1023,6 +1023,21 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(MakeDate(Literal(2019), Literal(7), Literal(32)), null)
   }
 
+  test("creating values of DateType via make_date with ansiEnabled") {
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
+      checkEvaluation(MakeDate(Literal(2013), Literal(7), Literal(15)), Date.valueOf("2013-7-15"))

Review comment:
       Add comments to distinguish the cases where the expression returns null from the cases where an exception is expected.
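
       For example, something along these lines (a sketch only: checkExceptionInExpression is the
       helper from ExpressionEvalHelper, and the expected error-message substring is an assumption):

           withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
             // Valid input: evaluates to the expected date.
             checkEvaluation(MakeDate(Literal(2013), Literal(7), Literal(15)), Date.valueOf("2013-7-15"))
             // Invalid day of month: with ANSI enabled we expect an exception rather than null.
             checkExceptionInExpression[java.time.DateTimeException](
               MakeDate(Literal(2019), Literal(7), Literal(32)), "Invalid value")
           }
           withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
             // Same invalid input with ANSI disabled: the expression returns null.
             checkEvaluation(MakeDate(Literal(2019), Literal(7), Literal(32)), null)
           }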

##########
File path: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
##########
@@ -1059,6 +1074,52 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(makeTimestampExpr, Timestamp.valueOf("2019-08-12 00:00:58.000001"))
   }
 
+  test("creating values of TimestampType via make_timestamp with ansiEnabled") {

Review comment:
       ditto.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -1955,6 +1960,7 @@ case class MakeTimestamp(
     val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
     val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
     val d = Decimal.getClass.getName.stripSuffix("$")
+    val exceptionCode = if (ansiEnabled) "throw e;" else s"${ev.isNull} = true;"

Review comment:
       ditto.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
##########
@@ -197,6 +197,8 @@ case class MakeInterval(
   override def dataType: DataType = CalendarIntervalType
   override def nullable: Boolean = true

Review comment:
       We should update nullable too, e.g.:
   
   if (failOnError) children.exists(_.nullable) else true
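
       Spelled out as the full override (a sketch, assuming the flag is renamed to failOnError):
       when failing on error, invalid inputs throw instead of producing null, so the result can
       only be null if one of the inputs is null.

           // With failOnError, a null result can only come from a null child; otherwise
           // invalid inputs are also mapped to null, so nullable must stay true.
           override def nullable: Boolean = if (failOnError) children.exists(_.nullable) else true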

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
##########
@@ -215,19 +217,20 @@ case class MakeInterval(
         min.asInstanceOf[Int],
         sec.map(_.asInstanceOf[Decimal]).getOrElse(Decimal(0, Decimal.MAX_LONG_DIGITS, 6)))
     } catch {
-      case _: ArithmeticException => null
+      case _: ArithmeticException if !ansiEnabled => null
     }
   }
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     nullSafeCodeGen(ctx, ev, (year, month, week, day, hour, min, sec) => {
       val iu = IntervalUtils.getClass.getName.stripSuffix("$")
       val secFrac = sec.getOrElse("0")
+      val exceptionCode = if (ansiEnabled) "throw e;" else s"${ev.isNull} = true;"
       s"""
         try {
           ${ev.value} = $iu.makeInterval($year, $month, $week, $day, $hour, $min, $secFrac);
         } catch (java.lang.ArithmeticException e) {
-          ${ev.isNull} = true;
+          $exceptionCode

Review comment:
       Nit: how about naming it failOnErrorBranch?
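
       i.e., roughly (a sketch, assuming the ANSI flag is renamed to failOnError as suggested above):

           // Under fail-on-error (ANSI) mode rethrow the arithmetic error from the generated
           // catch block; otherwise mark the result as null.
           val failOnErrorBranch = if (failOnError) "throw e;" else s"${ev.isNull} = true;"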

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -1797,23 +1797,26 @@ case class MakeDate(year: Expression, month: Expression, day: Expression)
   override def dataType: DataType = DateType
   override def nullable: Boolean = true
 
+  val ansiEnabled: Boolean = SQLConf.get.ansiEnabled
+
   override def nullSafeEval(year: Any, month: Any, day: Any): Any = {
     try {
       val ld = LocalDate.of(year.asInstanceOf[Int], month.asInstanceOf[Int], day.asInstanceOf[Int])
       localDateToDays(ld)
     } catch {
-      case _: java.time.DateTimeException => null
+      case _: java.time.DateTimeException if !ansiEnabled => null
     }
   }
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
+    val exceptionCode = if (ansiEnabled) "throw e;" else s"${ev.isNull} = true;"

Review comment:
       ditto

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
##########
@@ -215,19 +217,20 @@ case class MakeInterval(
         min.asInstanceOf[Int],
         sec.map(_.asInstanceOf[Decimal]).getOrElse(Decimal(0, Decimal.MAX_LONG_DIGITS, 6)))
     } catch {
-      case _: ArithmeticException => null
+      case _: ArithmeticException if !ansiEnabled => null

Review comment:
       ditto.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -1978,7 +1984,7 @@ case class MakeTimestamp(
         java.time.Instant instant = ldt.atZone($zoneId).toInstant();
         ${ev.value} = $dtu.instantToMicros(instant);
       } catch (java.time.DateTimeException e) {
-        ${ev.isNull} = true;
+        $exceptionCode

Review comment:
       ditto.

##########
File path: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
##########
@@ -1023,6 +1023,21 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(MakeDate(Literal(2019), Literal(7), Literal(32)), null)
   }
 
+  test("creating values of DateType via make_date with ansiEnabled") {

Review comment:
       Nit: rename the test to "ANSI mode: creating values of DateType via make_date".




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


