Posted to commits@spark.apache.org by ma...@apache.org on 2021/07/13 14:25:15 UTC

[spark] branch branch-3.2 updated: [SPARK-36120][SQL] Support TimestampNTZ type in cache table

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 3ace01b  [SPARK-36120][SQL] Support TimestampNTZ type in cache table
3ace01b is described below

commit 3ace01b25bf1c69901a07c11acbae6fa996fc06c
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Tue Jul 13 17:23:48 2021 +0300

    [SPARK-36120][SQL] Support TimestampNTZ type in cache table
    
    ### What changes were proposed in this pull request?
    
    Support the TimestampNTZ type column in the SQL command CACHE TABLE.
    
    ### Why are the changes needed?
    
    The CACHE TABLE command should support the new timestamp type.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the TimestampNTZ type column can be used in `CACHE TABLE`.
    
    ### How was this patch tested?
    
    Unit test
    
    Closes #33322 from gengliangwang/cacheTable.
    
    Authored-by: Gengliang Wang <ge...@apache.org>
    Signed-off-by: Max Gekk <ma...@gmail.com>
    (cherry picked from commit 067432705fbec970bd713adf37d2aa17c6bcf5a0)
    Signed-off-by: Max Gekk <ma...@gmail.com>
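The user-facing change described above boils down to being able to cache a query that produces a TimestampNTZ column and read it back. A minimal sketch of that flow, assuming a spark-shell session (SparkSession named `spark`) built from a branch that includes this patch; the table name `ntz_demo` and the `ts` alias are illustrative only and not taken from the patch:

import java.time.LocalDateTime

// Cache a single-row table whose only column is a TIMESTAMP_NTZ literal.
spark.sql("CACHE TABLE ntz_demo AS SELECT TIMESTAMP_NTZ'2021-01-01 00:00:00' AS ts")

// TimestampNTZ values surface as java.time.LocalDateTime on the Scala side.
val ts = spark.table("ntz_demo").collect().head.getAs[LocalDateTime]("ts")
assert(ts == LocalDateTime.parse("2021-01-01T00:00:00"))

// Release the cached data when done.
spark.sql("UNCACHE TABLE ntz_demo")
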
---
 .../spark/sql/execution/columnar/ColumnBuilder.scala      |  3 ++-
 .../apache/spark/sql/execution/columnar/ColumnType.scala  |  2 +-
 .../sql/execution/columnar/GenerateColumnAccessor.scala   |  2 +-
 .../scala/org/apache/spark/sql/CachedTableSuite.scala     | 15 ++++++++++++++-
 4 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
index e9251e8..9ddc665 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
@@ -175,7 +175,8 @@ private[columnar] object ColumnBuilder {
       case ByteType => new ByteColumnBuilder
       case ShortType => new ShortColumnBuilder
       case IntegerType | DateType | _: YearMonthIntervalType => new IntColumnBuilder
-      case LongType | TimestampType | _: DayTimeIntervalType => new LongColumnBuilder
+      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
+        new LongColumnBuilder
       case FloatType => new FloatColumnBuilder
       case DoubleType => new DoubleColumnBuilder
       case StringType => new StringColumnBuilder
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
index 8e99368..419dcc6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
@@ -818,7 +818,7 @@ private[columnar] object ColumnType {
       case ByteType => BYTE
       case ShortType => SHORT
       case IntegerType | DateType | _: YearMonthIntervalType => INT
-      case LongType | TimestampType | _: DayTimeIntervalType => LONG
+      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType => LONG
       case FloatType => FLOAT
       case DoubleType => DOUBLE
       case StringType => STRING
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
index 190c2c3..6e666d4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
@@ -81,7 +81,7 @@ object GenerateColumnAccessor extends CodeGenerator[Seq[DataType], ColumnarItera
         case ByteType => classOf[ByteColumnAccessor].getName
         case ShortType => classOf[ShortColumnAccessor].getName
         case IntegerType | DateType | _: YearMonthIntervalType => classOf[IntColumnAccessor].getName
-        case LongType | TimestampType | _: DayTimeIntervalType =>
+        case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
           classOf[LongColumnAccessor].getName
         case FloatType => classOf[FloatColumnAccessor].getName
         case DoubleType => classOf[DoubleColumnAccessor].getName
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 1915044..db717d2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import java.io.{File, FilenameFilter}
 import java.nio.file.{Files, Paths}
-import java.time.{Duration, Period}
+import java.time.{Duration, LocalDateTime, Period}
 
 import scala.collection.mutable.HashSet
 import scala.concurrent.duration._
@@ -1538,6 +1538,19 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
     }
   }
 
+  test("SPARK-36120: Support cache/uncache table with TimestampNTZ type") {
+    val tableName = "ntzCache"
+    withTable(tableName) {
+      sql(s"CACHE TABLE $tableName AS SELECT TIMESTAMP_NTZ'2021-01-01 00:00:00'")
+      checkAnswer(spark.table(tableName), Row(LocalDateTime.parse("2021-01-01T00:00:00")))
+      spark.table(tableName).queryExecution.withCachedData.collect {
+        case cached: InMemoryRelation =>
+          assert(cached.stats.sizeInBytes === 8)
+      }
+      sql(s"UNCACHE TABLE $tableName")
+    }
+  }
+
   private def testCreateTemporaryViewUsingWithCache(ident: TableIdentifier): Unit = {
     withTempDir { dir =>
       val path1 = new File(dir, "t1").getCanonicalPath

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org