Posted to commits@spark.apache.org by do...@apache.org on 2021/11/08 20:15:27 UTC

[spark] branch branch-3.2 updated: [SPARK-37196][SQL] HiveDecimal enforcePrecisionScale failed return null

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 8a8f4f6  [SPARK-37196][SQL] HiveDecimal enforcePrecisionScale failed return null
8a8f4f6 is described below

commit 8a8f4f624adc6b54766d6c545ca4ebfd05889ea8
Author: Angerszhuuuu <an...@gmail.com>
AuthorDate: Mon Nov 8 12:08:32 2021 -0800

    [SPARK-37196][SQL] HiveDecimal enforcePrecisionScale failed return null
    
    ### What changes were proposed in this pull request?
    For the following case:
    ```
    withTempDir { dir =>
          withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "false") {
            withTable("test_precision") {
              val df = sql("SELECT 'dummy' AS name, 1000000000000000000010.7000000000000010 AS value")
              df.write.mode("Overwrite").parquet(dir.getAbsolutePath)
              sql(
                s"""
                   |CREATE EXTERNAL TABLE test_precision(name STRING, value DECIMAL(18,6))
                   |STORED AS PARQUET LOCATION '${dir.getAbsolutePath}'
                   |""".stripMargin)
              checkAnswer(sql("SELECT * FROM test_precision"), Row("dummy", null))
            }
          }
        }
    ```
    
    The issue is that we write the data with a DataFrame whose schema is
    ```
    root
     |-- name: string (nullable = false)
     |-- value: decimal(38,16) (nullable = false)
    ```
    but create the table with the schema
    
    ```
    root
     |-- name: string (nullable = false)
     |-- value: decimal(18,6) (nullable = false)
    ```
    
    This mismatch causes `enforcePrecisionScale` to return `null`:
    ```
      public HiveDecimal getPrimitiveJavaObject(Object o) {
        return o == null ? null : this.enforcePrecisionScale(((HiveDecimalWritable)o).getHiveDecimal());
      }
    ```
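
    For illustration only (not part of this patch), a minimal sketch of that behavior using Hive's static `HiveDecimal.enforcePrecisionScale` helper; the literal and the (18, 6) bounds come from the repro above, and it is assumed the object inspector's instance method enforces the table's declared precision/scale in the same way:
    ```
    import org.apache.hadoop.hive.common.type.HiveDecimal

    // The repro literal needs precision 38 / scale 16, so it cannot be
    // represented as DECIMAL(18,6); Hive signals that by returning null.
    val wide = HiveDecimal.create("1000000000000000000010.7000000000000010")
    val narrowed = HiveDecimal.enforcePrecisionScale(wide, 18, 6)
    assert(narrowed == null)
    ```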
    An NPE is then thrown when `toCatalystDecimal` is called.
    
    We should check whether the returned value is `null` to avoid throwing the NPE.
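
    One way to express that guard, sketched here with `Option` for readability (the committed change in the `HiveShim.scala` diff below uses explicit null checks instead):
    ```
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector
    import org.apache.spark.sql.types.Decimal

    def toCatalystDecimal(hdoi: HiveDecimalObjectInspector, data: Any): Decimal = {
      // Both accessors may hand back null when enforcePrecisionScale cannot fit
      // the stored value into the declared precision/scale.
      val bigDecimal =
        if (hdoi.preferWritable()) {
          Option(hdoi.getPrimitiveWritableObject(data)).map(_.getHiveDecimal().bigDecimalValue())
        } else {
          Option(hdoi.getPrimitiveJavaObject(data)).map(_.bigDecimalValue())
        }
      bigDecimal.map(d => Decimal(d, hdoi.precision(), hdoi.scale())).orNull
    }
    ```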
    
    ### Why are the changes needed?
    Fix a NullPointerException thrown when the Hive decimal value does not fit the table's declared precision/scale.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Added UT
    
    Closes #34519 from AngersZhuuuu/SPARK-37196.
    
    Authored-by: Angerszhuuuu <an...@gmail.com>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
    (cherry picked from commit a4f8ffbbfb0158a03ff52f1ed0dde75241c3a90e)
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 .../main/scala/org/apache/spark/sql/hive/HiveShim.scala | 15 ++++++++++++---
 .../apache/spark/sql/hive/execution/SQLQuerySuite.scala | 17 +++++++++++++++++
 2 files changed, 29 insertions(+), 3 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
index 3a53a2a..351cde5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
@@ -101,10 +101,19 @@ private[hive] object HiveShim {
 
   def toCatalystDecimal(hdoi: HiveDecimalObjectInspector, data: Any): Decimal = {
     if (hdoi.preferWritable()) {
-      Decimal(hdoi.getPrimitiveWritableObject(data).getHiveDecimal().bigDecimalValue,
-        hdoi.precision(), hdoi.scale())
+      val value = hdoi.getPrimitiveWritableObject(data)
+      if (value == null) {
+        null
+      } else {
+        Decimal(value.getHiveDecimal().bigDecimalValue, hdoi.precision(), hdoi.scale())
+      }
     } else {
-      Decimal(hdoi.getPrimitiveJavaObject(data).bigDecimalValue(), hdoi.precision(), hdoi.scale())
+      val value = hdoi.getPrimitiveJavaObject(data)
+      if (value == null) {
+        null
+      } else {
+        Decimal(value.bigDecimalValue(), hdoi.precision(), hdoi.scale())
+      }
     }
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 5db7050..8d248bb 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -2642,6 +2642,23 @@ abstract class SQLQuerySuiteBase extends QueryTest with SQLTestUtils with TestHi
       }
     }
   }
+
+  test("SPARK-37196: HiveDecimal Precision Scale match failed should return null") {
+    withTempDir { dir =>
+      withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "false") {
+        withTable("test_precision") {
+          val df = sql(s"SELECT 'dummy' AS name, ${"1" * 20}.${"2" * 18} AS value")
+          df.write.mode("Overwrite").parquet(dir.getAbsolutePath)
+          sql(
+            s"""
+               |CREATE EXTERNAL TABLE test_precision(name STRING, value DECIMAL(18,6))
+               |STORED AS PARQUET LOCATION '${dir.getAbsolutePath}'
+               |""".stripMargin)
+          checkAnswer(sql("SELECT * FROM test_precision"), Row("dummy", null))
+        }
+      }
+    }
+  }
 }
 
 @SlowHiveTest
