You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by da...@apache.org on 2015/10/08 00:51:14 UTC
spark git commit: [SPARK-10980] [SQL] fix bug in create Decimal
Repository: spark
Updated Branches:
refs/heads/master 7bf07faa7 -> 37526aca2
[SPARK-10980] [SQL] fix bug in create Decimal
The created decimal is wrong if using `Decimal(unscaled, precision, scale)` with unscaled > 1e18 and precision > 18 and scale > 0.
This bug has existed since the beginning.
Author: Davies Liu <da...@databricks.com>
Closes #9014 from davies/fix_decimal.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/37526aca
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/37526aca
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/37526aca
Branch: refs/heads/master
Commit: 37526aca2430e36a931fbe6e01a152e701a1b94e
Parents: 7bf07fa
Author: Davies Liu <da...@databricks.com>
Authored: Wed Oct 7 15:51:09 2015 -0700
Committer: Davies Liu <da...@gmail.com>
Committed: Wed Oct 7 15:51:09 2015 -0700
----------------------------------------------------------------------
.../src/main/scala/org/apache/spark/sql/types/Decimal.scala | 2 +-
.../scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/37526aca/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index bfcf111..909b8e3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -88,7 +88,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
if (precision < 19) {
return null // Requested precision is too low to represent this value
}
- this.decimalVal = BigDecimal(unscaled)
+ this.decimalVal = BigDecimal(unscaled, scale)
this.longVal = 0L
} else {
val p = POW_10(math.min(precision, MAX_LONG_DIGITS))
http://git-wip-us.apache.org/repos/asf/spark/blob/37526aca/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
index 6921d15..f9aceb8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
@@ -44,6 +44,7 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester {
checkDecimal(Decimal(170L, 4, 2), "1.70", 4, 2)
checkDecimal(Decimal(17L, 24, 1), "1.7", 24, 1)
checkDecimal(Decimal(1e17.toLong, 18, 0), 1e17.toLong.toString, 18, 0)
+ checkDecimal(Decimal(1000000000000000000L, 20, 2), "10000000000000000.00", 20, 2)
checkDecimal(Decimal(Long.MaxValue), Long.MaxValue.toString, 20, 0)
checkDecimal(Decimal(Long.MinValue), Long.MinValue.toString, 20, 0)
intercept[IllegalArgumentException](Decimal(170L, 2, 1))
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org