Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2020/06/18 11:55:55 UTC

[GitHub] [spark] MaxGekk commented on a change in pull request #27617: [SPARK-30865][SQL] Refactor DateTimeUtils

MaxGekk commented on a change in pull request #27617:
URL: https://github.com/apache/spark/pull/27617#discussion_r442170704



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
##########
@@ -178,59 +171,56 @@ object DateTimeUtils {
    *          Gregorian calendars.
    * @return The number of micros since epoch from `java.sql.Timestamp`.
    */
-  def fromJavaTimestamp(t: Timestamp): SQLTimestamp = {
+  def fromJavaTimestamp(t: Timestamp): Long = {
     val micros = millisToMicros(t.getTime) + (t.getNanos / NANOS_PER_MICROS) % MICROS_PER_MILLIS
     rebaseJulianToGregorianMicros(micros)
   }
 
   /**
-   * Returns the number of microseconds since epoch from Julian day
-   * and nanoseconds in a day
+   * Returns the number of microseconds since epoch from Julian day and nanoseconds in a day.
    */
-  def fromJulianDay(day: Int, nanoseconds: Long): SQLTimestamp = {
+  def fromJulianDay(days: Int, nanos: Long): Long = {
     // use Long to avoid rounding errors
-    val seconds = (day - JULIAN_DAY_OF_EPOCH).toLong * SECONDS_PER_DAY
-    val micros = SECONDS.toMicros(seconds) + NANOSECONDS.toMicros(nanoseconds)
-    val rebased = rebaseJulianToGregorianMicros(micros)
-    rebased
+    val micros = (days - JULIAN_DAY_OF_EPOCH).toLong * MICROS_PER_DAY + nanos / NANOS_PER_MICROS
+    rebaseJulianToGregorianMicros(micros)
   }
 
   /**
   * Returns Julian day and nanoseconds in a day from the number of microseconds since epoch.
   *
   * Note: supports timestamps since 4717 BC (without negative nanoseconds, compatible with Hive).
    */
-  def toJulianDay(us: SQLTimestamp): (Int, Long) = {
-    val julian_us = rebaseGregorianToJulianMicros(us) + JULIAN_DAY_OF_EPOCH * MICROS_PER_DAY
-    val day = julian_us / MICROS_PER_DAY
-    val micros = julian_us % MICROS_PER_DAY
-    (day.toInt, MICROSECONDS.toNanos(micros))
+  def toJulianDay(micros: Long): (Int, Long) = {
+    val julianUs = rebaseGregorianToJulianMicros(micros) + JULIAN_DAY_OF_EPOCH * MICROS_PER_DAY
+    val days = julianUs / MICROS_PER_DAY
+    val us = julianUs % MICROS_PER_DAY
+    (days.toInt, MICROSECONDS.toNanos(us))
   }
 
-  /*
+  /**
   * Converts the timestamp to milliseconds since epoch. In Spark, timestamp values have
   * microsecond precision, so this conversion is lossy.
    */
-  def microsToMillis(us: SQLTimestamp): Long = {
+  def microsToMillis(micros: Long): Long = {
     // When the timestamp is negative, i.e. before 1970, we need to adjust the milliseconds portion.
     // Example - 1965-01-01 10:11:12.123456 is represented as (-157700927876544) in micro precision.
     // In millis precision the above needs to be represented as (-157700927877).
-    Math.floorDiv(us, MICROS_PER_MILLIS)
+    Math.floorDiv(micros, MICROS_PER_MILLIS)
   }
 
-  /*
-   * Converts milliseconds since epoch to SQLTimestamp.
+  /**
+   * Converts milliseconds since the epoch to microseconds.
    */
-  def millisToMicros(millis: Long): SQLTimestamp = {
+  def millisToMicros(millis: Long): Long = {
     Math.multiplyExact(millis, MICROS_PER_MILLIS)
   }
 
-  def microsToEpochDays(epochMicros: SQLTimestamp, zoneId: ZoneId): SQLDate = {
-    localDateToDays(microsToInstant(epochMicros).atZone(zoneId).toLocalDate)
+  def microsToEpochDays(micros: Long, zoneId: ZoneId): Int = {

Review comment:
       Looks like, yes
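
For context, a minimal, self-contained Scala sketch of the Julian-day arithmetic in
`fromJulianDay`/`toJulianDay` above. The constants are inlined from Spark's
DateTimeConstants, and the rebase between the hybrid Julian and Proleptic Gregorian
calendars is deliberately omitted, so this is an illustration of the arithmetic only
and agrees with Spark just where the two calendars coincide:

    object JulianDaySketch {
      // Julian day number of the Unix epoch day, 1970-01-01 (Spark's JULIAN_DAY_OF_EPOCH).
      val JULIAN_DAY_OF_EPOCH: Int = 2440588
      val MICROS_PER_DAY: Long = 86400L * 1000L * 1000L
      val NANOS_PER_MICROS: Long = 1000L

      // Microseconds since the epoch from a Julian day and nanoseconds within the day.
      def fromJulianDay(days: Int, nanos: Long): Long =
        (days - JULIAN_DAY_OF_EPOCH).toLong * MICROS_PER_DAY + nanos / NANOS_PER_MICROS

      // Julian day and nanoseconds within the day from microseconds since the epoch.
      def toJulianDay(micros: Long): (Int, Long) = {
        val julianUs = micros + JULIAN_DAY_OF_EPOCH * MICROS_PER_DAY
        ((julianUs / MICROS_PER_DAY).toInt, (julianUs % MICROS_PER_DAY) * NANOS_PER_MICROS)
      }

      def main(args: Array[String]): Unit = {
        // The epoch round-trips: Julian day 2440588 at 0 nanos is 0 micros, and back.
        assert(fromJulianDay(JULIAN_DAY_OF_EPOCH, 0L) == 0L)
        val (day, ns) = toJulianDay(0L)
        assert(day == JULIAN_DAY_OF_EPOCH && ns == 0L)
      }
    }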
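
And a similar sketch of the micros/millis direction, showing why the code uses
Math.floorDiv rather than plain integer division for pre-1970 timestamps; the
example value is the one from the comment in the diff above:

    object MicrosMillisSketch {
      val MICROS_PER_MILLIS: Long = 1000L

      // floorDiv rounds toward negative infinity, so a negative timestamp lands on
      // the earlier millisecond instead of being truncated toward zero.
      def microsToMillis(micros: Long): Long = Math.floorDiv(micros, MICROS_PER_MILLIS)

      // Overflow-checked widening in the opposite direction.
      def millisToMicros(millis: Long): Long = Math.multiplyExact(millis, MICROS_PER_MILLIS)

      def main(args: Array[String]): Unit = {
        val micros = -157700927876544L
        assert(microsToMillis(micros) == -157700927877L)      // floored, as required
        assert(micros / MICROS_PER_MILLIS == -157700927876L)  // truncated: off by one
        assert(millisToMicros(-157700927877L) == -157700927877000L)
      }
    }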




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org