Posted to commits@spark.apache.org by we...@apache.org on 2020/08/17 14:00:01 UTC

[spark] branch branch-2.4 updated: Revert "[SPARK-32018][SQL][2.4] UnsafeRow.setDecimal should set null with overflowed value"

This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-2.4 by this push:
     new e3ec2d7  Revert "[SPARK-32018][SQL][2.4] UnsafeRow.setDecimal should set null with overflowed value"
e3ec2d7 is described below

commit e3ec2d7ad15383d915dcc6cda5bc63f490396956
Author: Wenchen Fan <we...@databricks.com>
AuthorDate: Mon Aug 17 21:58:13 2020 +0800

    Revert "[SPARK-32018][SQL][2.4] UnsafeRow.setDecimal should set null with overflowed value"
    
    This reverts commit afdad0eea81f40cab32b95af8c1bbeed55c5f10f.
---
 .../org/apache/spark/sql/catalyst/expressions/UnsafeRow.java   |  2 +-
 .../src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala   | 10 ----------
 2 files changed, 1 insertion(+), 11 deletions(-)

diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
index 38c2880..a2440d9 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
@@ -293,7 +293,7 @@ public final class UnsafeRow extends InternalRow implements Externalizable, Kryo
       Platform.putLong(baseObject, baseOffset + cursor, 0L);
       Platform.putLong(baseObject, baseOffset + cursor + 8, 0L);
 
-      if (value == null || !value.changePrecision(precision, value.scale())) {
+      if (value == null) {
         setNullAt(ordinal);
         // keep the offset for future update
         Platform.putLong(baseObject, getFieldOffset(ordinal), cursor << 32);
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
index 9daa69c..a5f904c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala
@@ -178,14 +178,4 @@ class UnsafeRowSuite extends SparkFunSuite {
     // Makes sure hashCode on unsafe array won't crash
     unsafeRow.getArray(0).hashCode()
   }
-
-  test("SPARK-32018: setDecimal with overflowed value") {
-    val d1 = new Decimal().set(BigDecimal("10000000000000000000")).toPrecision(38, 18)
-    val row = InternalRow.apply(d1)
-    val unsafeRow = UnsafeProjection.create(Array[DataType](DecimalType(38, 18))).apply(row)
-    assert(unsafeRow.getDecimal(0, 38, 18) === d1)
-    val d2 = (d1 * Decimal(10)).toPrecision(39, 18)
-    unsafeRow.setDecimal(0, d2, 38)
-    assert(unsafeRow.getDecimal(0, 38, 18) === null)
-  }
 }
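
For context on the condition reverted in the UnsafeRow.java hunk above: the removed check relied on Decimal.changePrecision, which returns false when a value can no longer be represented at the requested precision and scale. The following standalone sketch (not part of this commit; the object name and main method are illustrative only, assuming Spark 2.4's org.apache.spark.sql.types.Decimal API) shows the overflow case the removed test exercised:

    // Illustrative sketch only -- not from the Spark source tree.
    import org.apache.spark.sql.types.Decimal

    object DecimalOverflowSketch {
      def main(args: Array[String]): Unit = {
        // 10^19 has 20 integer digits, so it fits a Decimal(38, 18)
        // (38 total digits minus 18 digits of scale leaves room for 20).
        val d1 = Decimal(BigDecimal("10000000000000000000"), 38, 18)

        // Multiplying by 10 yields 10^20, which needs 21 integer digits.
        val d2 = d1 * Decimal(10)

        // changePrecision reports whether the value still fits the target
        // precision/scale; 21 + 18 digits exceed 38, so this prints false.
        // The reverted patch used this signal to null out the field rather
        // than write an overflowed value.
        println(d2.changePrecision(38, 18))
      }
    }

After this revert, branch-2.4 goes back to nulling the field only when the passed Decimal is itself null, as shown by the restored `if (value == null)` condition above.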


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org