Posted to commits@spark.apache.org by li...@apache.org on 2018/02/04 17:15:52 UTC

spark git commit: [SPARK-22036][SQL][FOLLOWUP] Fix decimalArithmeticOperations.sql

Repository: spark
Updated Branches:
  refs/heads/master 715047b02 -> 6fb3fd153


[SPARK-22036][SQL][FOLLOWUP] Fix decimalArithmeticOperations.sql

## What changes were proposed in this pull request?

Fix the `decimalArithmeticOperations.sql` test. Two queries (`select 0.001 / 9876543210987654321098765432109876543.2`) were missing their terminating semicolons, so the parser concatenated each of them with the following statement and the golden file recorded `ParseException`s instead of query results. This change terminates and replaces those queries, and adds an addition query, so that precision loss is exercised under both settings of `spark.sql.decimalOperations.allowPrecisionLoss`.

## How was this patch tested?

N/A

Author: Yuming Wang <wg...@gmail.com>
Author: wangyum <wg...@gmail.com>
Author: Yuming Wang <yu...@ebay.com>

Closes #20498 from wangyum/SPARK-22036.
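
For context, a minimal sketch of the behavior the updated test pins down, using one of the replacement queries and the results recorded in the diff below (the expected output file is regenerated by SQLQueryTestSuite, which produced the `.sql.out` changes shown here):

    -- First block of the test: precision loss is allowed (the default),
    -- so the result is truncated to fit decimal(38,9):
    select 12345678912345.123456789123 / 0.000000012345678;
    -- 1000000073899961059796.725866332

    -- Second block: with precision loss disallowed, the same query returns NULL
    -- because the exact result does not fit decimal(38,18):
    set spark.sql.decimalOperations.allowPrecisionLoss=false;
    select 12345678912345.123456789123 / 0.000000012345678;
    -- NULL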


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6fb3fd15
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6fb3fd15
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6fb3fd15

Branch: refs/heads/master
Commit: 6fb3fd15365d43733aefdb396db205d7ccf57f75
Parents: 715047b
Author: Yuming Wang <wg...@gmail.com>
Authored: Sun Feb 4 09:15:48 2018 -0800
Committer: gatorsmile <ga...@gmail.com>
Committed: Sun Feb 4 09:15:48 2018 -0800

----------------------------------------------------------------------
 .../native/decimalArithmeticOperations.sql      |   6 +-
 .../native/decimalArithmeticOperations.sql.out  | 140 ++++++++++---------
 2 files changed, 80 insertions(+), 66 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/6fb3fd15/sql/core/src/test/resources/sql-tests/inputs/typeCoercion/native/decimalArithmeticOperations.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/typeCoercion/native/decimalArithmeticOperations.sql b/sql/core/src/test/resources/sql-tests/inputs/typeCoercion/native/decimalArithmeticOperations.sql
index c6d8a49..9be7fcd 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/typeCoercion/native/decimalArithmeticOperations.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/typeCoercion/native/decimalArithmeticOperations.sql
@@ -48,8 +48,9 @@ select 12345678901234567890.0 * 12345678901234567890.0;
 select 1e35 / 0.1;
 
 -- arithmetic operations causing a precision loss are truncated
+select 12345678912345678912345678912.1234567 + 9999999999999999999999999999999.12345;
 select 123456789123456789.1234567890 * 1.123456789123456789;
-select 0.001 / 9876543210987654321098765432109876543.2
+select 12345678912345.123456789123 / 0.000000012345678;
 
 -- return NULL instead of rounding, according to old Spark versions' behavior
 set spark.sql.decimalOperations.allowPrecisionLoss=false;
@@ -74,7 +75,8 @@ select 12345678901234567890.0 * 12345678901234567890.0;
 select 1e35 / 0.1;
 
 -- arithmetic operations causing a precision loss return NULL
+select 12345678912345678912345678912.1234567 + 9999999999999999999999999999999.12345;
 select 123456789123456789.1234567890 * 1.123456789123456789;
-select 0.001 / 9876543210987654321098765432109876543.2
+select 12345678912345.123456789123 / 0.000000012345678;
 
 drop table decimals_test;

http://git-wip-us.apache.org/repos/asf/spark/blob/6fb3fd15/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalArithmeticOperations.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalArithmeticOperations.sql.out
index 4d70fe1..6bfdb84 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalArithmeticOperations.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalArithmeticOperations.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 32
+-- Number of queries: 36
 
 
 -- !query 0
@@ -146,146 +146,158 @@ NULL
 
 
 -- !query 17
-select 123456789123456789.1234567890 * 1.123456789123456789
+select 12345678912345678912345678912.1234567 + 9999999999999999999999999999999.12345
 -- !query 17 schema
-struct<(CAST(123456789123456789.1234567890 AS DECIMAL(36,18)) * CAST(1.123456789123456789 AS DECIMAL(36,18))):decimal(38,18)>
+struct<(CAST(12345678912345678912345678912.1234567 AS DECIMAL(38,6)) + CAST(9999999999999999999999999999999.12345 AS DECIMAL(38,6))):decimal(38,6)>
 -- !query 17 output
-138698367904130467.654320988515622621
+10012345678912345678912345678911.246907
 
 
 -- !query 18
-select 0.001 / 9876543210987654321098765432109876543.2
-
-set spark.sql.decimalOperations.allowPrecisionLoss=false
+select 123456789123456789.1234567890 * 1.123456789123456789
 -- !query 18 schema
-struct<>
+struct<(CAST(123456789123456789.1234567890 AS DECIMAL(36,18)) * CAST(1.123456789123456789 AS DECIMAL(36,18))):decimal(38,18)>
 -- !query 18 output
-org.apache.spark.sql.catalyst.parser.ParseException
-
-mismatched input 'spark' expecting <EOF>(line 3, pos 4)
-
-== SQL ==
-select 0.001 / 9876543210987654321098765432109876543.2
-
-set spark.sql.decimalOperations.allowPrecisionLoss=false
-----^^^
+138698367904130467.654320988515622621
 
 
 -- !query 19
-select id, a+b, a-b, a*b, a/b from decimals_test order by id
+select 12345678912345.123456789123 / 0.000000012345678
 -- !query 19 schema
-struct<id:int,(a + b):decimal(38,17),(a - b):decimal(38,17),(a * b):decimal(38,6),(a / b):decimal(38,6)>
+struct<(CAST(12345678912345.123456789123 AS DECIMAL(29,15)) / CAST(1.2345678E-8 AS DECIMAL(29,15))):decimal(38,9)>
 -- !query 19 output
-1	1099	-899	99900	0.1001
-2	24690.246	0	152402061.885129	1
-3	1234.2234567891011	-1233.9765432108989	152.358023	0.0001
-4	123456789123456790.12345678912345679	123456789123456787.87654321087654321	138698367904130467.515623	109890109097814272.043109
+1000000073899961059796.725866332
 
 
 -- !query 20
-select id, a*10, b/10 from decimals_test order by id
+set spark.sql.decimalOperations.allowPrecisionLoss=false
 -- !query 20 schema
-struct<id:int,(CAST(a AS DECIMAL(38,18)) * CAST(CAST(10 AS DECIMAL(2,0)) AS DECIMAL(38,18))):decimal(38,15),(CAST(b AS DECIMAL(38,18)) / CAST(CAST(10 AS DECIMAL(2,0)) AS DECIMAL(38,18))):decimal(38,18)>
+struct<key:string,value:string>
 -- !query 20 output
-1	1000	99.9
-2	123451.23	1234.5123
-3	1.234567891011	123.41
-4	1234567891234567890	0.112345678912345679
+spark.sql.decimalOperations.allowPrecisionLoss	false
 
 
 -- !query 21
-select 10.3 * 3.0
+select id, a+b, a-b, a*b, a/b from decimals_test order by id
 -- !query 21 schema
-struct<(CAST(10.3 AS DECIMAL(3,1)) * CAST(3.0 AS DECIMAL(3,1))):decimal(6,2)>
+struct<id:int,(a + b):decimal(38,18),(a - b):decimal(38,18),(a * b):decimal(38,36),(a / b):decimal(38,18)>
 -- !query 21 output
-30.9
+1	1099	-899	NULL	0.1001001001001001
+2	24690.246	0	NULL	1
+3	1234.2234567891011	-1233.9765432108989	NULL	0.000100037913541123
+4	123456789123456790.123456789123456789	123456789123456787.876543210876543211	NULL	109890109097814272.043109406191131436
 
 
 -- !query 22
-select 10.3000 * 3.0
+select id, a*10, b/10 from decimals_test order by id
 -- !query 22 schema
-struct<(CAST(10.3000 AS DECIMAL(6,4)) * CAST(3.0 AS DECIMAL(6,4))):decimal(9,5)>
+struct<id:int,(CAST(a AS DECIMAL(38,18)) * CAST(CAST(10 AS DECIMAL(2,0)) AS DECIMAL(38,18))):decimal(38,18),(CAST(b AS DECIMAL(38,18)) / CAST(CAST(10 AS DECIMAL(2,0)) AS DECIMAL(38,18))):decimal(38,19)>
 -- !query 22 output
-30.9
+1	1000	99.9
+2	123451.23	1234.5123
+3	1.234567891011	123.41
+4	1234567891234567890	0.1123456789123456789
 
 
 -- !query 23
-select 10.30000 * 30.0
+select 10.3 * 3.0
 -- !query 23 schema
-struct<(CAST(10.30000 AS DECIMAL(7,5)) * CAST(30.0 AS DECIMAL(7,5))):decimal(11,6)>
+struct<(CAST(10.3 AS DECIMAL(3,1)) * CAST(3.0 AS DECIMAL(3,1))):decimal(6,2)>
 -- !query 23 output
-309
+30.9
 
 
 -- !query 24
-select 10.300000000000000000 * 3.000000000000000000
+select 10.3000 * 3.0
 -- !query 24 schema
-struct<(CAST(10.300000000000000000 AS DECIMAL(20,18)) * CAST(3.000000000000000000 AS DECIMAL(20,18))):decimal(38,34)>
+struct<(CAST(10.3000 AS DECIMAL(6,4)) * CAST(3.0 AS DECIMAL(6,4))):decimal(9,5)>
 -- !query 24 output
 30.9
 
 
 -- !query 25
-select 10.300000000000000000 * 3.0000000000000000000
+select 10.30000 * 30.0
 -- !query 25 schema
-struct<(CAST(10.300000000000000000 AS DECIMAL(21,19)) * CAST(3.0000000000000000000 AS DECIMAL(21,19))):decimal(38,34)>
+struct<(CAST(10.30000 AS DECIMAL(7,5)) * CAST(30.0 AS DECIMAL(7,5))):decimal(11,6)>
 -- !query 25 output
-30.9
+309
 
 
 -- !query 26
-select (5e36 + 0.1) + 5e36
+select 10.300000000000000000 * 3.000000000000000000
 -- !query 26 schema
-struct<(CAST((CAST(5E+36 AS DECIMAL(38,1)) + CAST(0.1 AS DECIMAL(38,1))) AS DECIMAL(38,1)) + CAST(5E+36 AS DECIMAL(38,1))):decimal(38,1)>
+struct<(CAST(10.300000000000000000 AS DECIMAL(20,18)) * CAST(3.000000000000000000 AS DECIMAL(20,18))):decimal(38,36)>
 -- !query 26 output
-NULL
+30.9
 
 
 -- !query 27
-select (-4e36 - 0.1) - 7e36
+select 10.300000000000000000 * 3.0000000000000000000
 -- !query 27 schema
-struct<(CAST((CAST(-4E+36 AS DECIMAL(38,1)) - CAST(0.1 AS DECIMAL(38,1))) AS DECIMAL(38,1)) - CAST(7E+36 AS DECIMAL(38,1))):decimal(38,1)>
+struct<(CAST(10.300000000000000000 AS DECIMAL(21,19)) * CAST(3.0000000000000000000 AS DECIMAL(21,19))):decimal(38,37)>
 -- !query 27 output
 NULL
 
 
 -- !query 28
-select 12345678901234567890.0 * 12345678901234567890.0
+select (5e36 + 0.1) + 5e36
 -- !query 28 schema
-struct<(12345678901234567890.0 * 12345678901234567890.0):decimal(38,2)>
+struct<(CAST((CAST(5E+36 AS DECIMAL(38,1)) + CAST(0.1 AS DECIMAL(38,1))) AS DECIMAL(38,1)) + CAST(5E+36 AS DECIMAL(38,1))):decimal(38,1)>
 -- !query 28 output
 NULL
 
 
 -- !query 29
-select 1e35 / 0.1
+select (-4e36 - 0.1) - 7e36
 -- !query 29 schema
-struct<(CAST(1E+35 AS DECIMAL(37,1)) / CAST(0.1 AS DECIMAL(37,1))):decimal(38,6)>
+struct<(CAST((CAST(-4E+36 AS DECIMAL(38,1)) - CAST(0.1 AS DECIMAL(38,1))) AS DECIMAL(38,1)) - CAST(7E+36 AS DECIMAL(38,1))):decimal(38,1)>
 -- !query 29 output
 NULL
 
 
 -- !query 30
-select 123456789123456789.1234567890 * 1.123456789123456789
+select 12345678901234567890.0 * 12345678901234567890.0
 -- !query 30 schema
-struct<(CAST(123456789123456789.1234567890 AS DECIMAL(36,18)) * CAST(1.123456789123456789 AS DECIMAL(36,18))):decimal(38,18)>
+struct<(12345678901234567890.0 * 12345678901234567890.0):decimal(38,2)>
 -- !query 30 output
-138698367904130467.654320988515622621
+NULL
 
 
 -- !query 31
-select 0.001 / 9876543210987654321098765432109876543.2
-
-drop table decimals_test
+select 1e35 / 0.1
 -- !query 31 schema
-struct<>
+struct<(CAST(1E+35 AS DECIMAL(37,1)) / CAST(0.1 AS DECIMAL(37,1))):decimal(38,3)>
 -- !query 31 output
-org.apache.spark.sql.catalyst.parser.ParseException
+NULL
 
-mismatched input 'table' expecting <EOF>(line 3, pos 5)
 
-== SQL ==
-select 0.001 / 9876543210987654321098765432109876543.2
+-- !query 32
+select 12345678912345678912345678912.1234567 + 9999999999999999999999999999999.12345
+-- !query 32 schema
+struct<(CAST(12345678912345678912345678912.1234567 AS DECIMAL(38,7)) + CAST(9999999999999999999999999999999.12345 AS DECIMAL(38,7))):decimal(38,7)>
+-- !query 32 output
+NULL
+
+
+-- !query 33
+select 123456789123456789.1234567890 * 1.123456789123456789
+-- !query 33 schema
+struct<(CAST(123456789123456789.1234567890 AS DECIMAL(36,18)) * CAST(1.123456789123456789 AS DECIMAL(36,18))):decimal(38,28)>
+-- !query 33 output
+NULL
+
 
+-- !query 34
+select 12345678912345.123456789123 / 0.000000012345678
+-- !query 34 schema
+struct<(CAST(12345678912345.123456789123 AS DECIMAL(29,15)) / CAST(1.2345678E-8 AS DECIMAL(29,15))):decimal(38,18)>
+-- !query 34 output
+NULL
+
+
+-- !query 35
 drop table decimals_test
------^^^
+-- !query 35 schema
+struct<>
+-- !query 35 output
+


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org