Posted to issues@spark.apache.org by "Dongjoon Hyun (JIRA)" <ji...@apache.org> on 2019/08/12 09:26:00 UTC

[jira] [Resolved] (SPARK-28686) Higher precision in Math.toRadians from JDK 11

     [ https://issues.apache.org/jira/browse/SPARK-28686?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Dongjoon Hyun resolved SPARK-28686.
-----------------------------------
       Resolution: Fixed
    Fix Version/s: 3.0.0

Issue resolved by pull request 25417
[https://github.com/apache/spark/pull/25417]

> Higher precision in Math.toRadians from JDK 11
> ----------------------------------------------
>
>                 Key: SPARK-28686
>                 URL: https://issues.apache.org/jira/browse/SPARK-28686
>             Project: Spark
>          Issue Type: Sub-task
>          Components: SQL
>    Affects Versions: 3.0.0
>            Reporter: Yuming Wang
>            Assignee: Yuming Wang
>            Priority: Major
>             Fix For: 3.0.0
>
>
> On JDK 11, {{java.lang.Math.toRadians}} returns a slightly different (higher-precision) result than on JDK 8, which makes {{udf_radians}} in {{HiveCompatibilitySuite}} fail: {{- udf_radians *** FAILED *** (1 second, 791 milliseconds)}}.
> {noformat}
> [info] - udf_radians *** FAILED *** (1 second, 791 milliseconds)
> [info]   Results do not match for udf_radians:
> [info]   == Parsed Logical Plan ==
> [info]   'Project [unresolvedalias('radians(57.2958), None)]
> [info]   +- 'GlobalLimit 1
> [info]      +- 'LocalLimit 1
> [info]         +- 'UnresolvedRelation [src]
> [info]
> [info]   == Analyzed Logical Plan ==
> [info]   RADIANS(CAST(57.2958 AS DOUBLE)): double
> [info]   Project [RADIANS(cast(57.2958 as double)) AS RADIANS(CAST(57.2958 AS DOUBLE))#93209]
> [info]   +- GlobalLimit 1
> [info]      +- LocalLimit 1
> [info]         +- SubqueryAlias `default`.`src`
> [info]            +- HiveTableRelation `default`.`src`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [key#93207, value#93208]
> [info]
> [info]   == Optimized Logical Plan ==
> [info]   Project [1.0000003575641672 AS RADIANS(CAST(57.2958 AS DOUBLE))#93209]
> [info]   +- GlobalLimit 1
> [info]      +- LocalLimit 1
> [info]         +- Project
> [info]            +- InMemoryRelation [key#93207, value#93208], StorageLevel(disk, memory, deserialized, 1 replicas)
> [info]                  +- Scan hive default.src [key#93145, value#93146], HiveTableRelation `default`.`src`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [key#93145, value#93146]
> [info]
> [info]   == Physical Plan ==
> [info]   *(2) Project [1.0000003575641672 AS RADIANS(CAST(57.2958 AS DOUBLE))#93209]
> [info]   +- *(2) GlobalLimit 1
> [info]      +- Exchange SinglePartition, true
> [info]         +- *(1) LocalLimit 1
> [info]            +- Scan In-memory table src
> [info]                  +- InMemoryRelation [key#93207, value#93208], StorageLevel(disk, memory, deserialized, 1 replicas)
> [info]                        +- Scan hive default.src [key#93145, value#93146], HiveTableRelation `default`.`src`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [key#93145, value#93146]
> [info]
> [info]   RADIANS(CAST(57.2958 AS DOUBLE))
> [info]   !== HIVE - 1 row(s) ==   == CATALYST - 1 row(s) ==
> [info]   !1.000000357564167       1.0000003575641672 (HiveComparisonTest.scala:437)
> [info]   org.scalatest.exceptions.TestFailedException:
> [info]   at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
> [info]   at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
> [info]   at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
> [info]   at org.scalatest.Assertions.fail(Assertions.scala:1089)
> [info]   at org.scalatest.Assertions.fail$(Assertions.scala:1085)
> [info]   at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
> [info]   at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$33(HiveComparisonTest.scala:437)
> [info]   at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$33$adapted(HiveComparisonTest.scala:369)
> [info]   at scala.runtime.Tuple3Zipped$.$anonfun$foreach$1(Tuple3Zipped.scala:124)
> [info]   at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> [info]   at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> [info]   at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:39)
> [info]   at scala.runtime.Tuple3Zipped$.foreach$extension(Tuple3Zipped.scala:122)
> [info]   at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:369)
> [info]   at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:470)
> [info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
> [info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
> [info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info]   at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
> [info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
> [info]   at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
> [info]   at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
> [info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
> [info]   at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
> [info]   at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
> [info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
> [info]   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
> [info]   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
> [info]   at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:33)
> [info]   at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
> [info]   at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
> [info]   at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:33)
> [info]   at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
> [info]   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
> [info]   at scala.collection.immutable.List.foreach(List.scala:392)
> [info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
> [info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
> [info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
> [info]   at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
> [info]   at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
> [info]   at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
> [info]   at org.scalatest.Suite.run(Suite.scala:1147)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1129)
> [info]   at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
> [info]   at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
> [info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
> [info]   at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
> [info]   at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
> [info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:33)
> [info]   at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
> [info]   at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
> [info]   at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:33)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> [info]   at java.base/java.lang.Thread.run(Thread.java:834)
> {noformat}
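> The Hive golden answer ({{1.000000357564167}}) and the value Catalyst folds the expression to on JDK 11 ({{1.0000003575641672}}) differ only in the last bit. A quick way to confirm that the two results are adjacent doubles (the class name below is ours; the constants are copied from the log above):
> {code:java}
> public class RadiansDiff {
>   public static void main(String[] args) {
>     double hiveGolden = 1.000000357564167;   // expected value recorded with JDK 8
>     double catalyst   = 1.0000003575641672;  // value produced on JDK 11
>     double diff = Math.abs(catalyst - hiveGolden);
>     System.out.println(diff);                           // about 2.22e-16
>     System.out.println(Math.ulp(hiveGolden));           // one ulp at this magnitude
>     System.out.println(diff <= Math.ulp(hiveGolden));   // true: the gap is at most one ulp
>   }
> }
> {code}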
> How to reproduce:
> {code:java}
> public class TestRadians {
>   public static void main(String[] args) {
>     System.out.println(java.lang.Math.toRadians(57.2958));
>   }
> }
> {code}
> {code:sh}
> [root@spark-3267648 ~]# javac TestRadians.java
> [root@spark-3267648 ~]# /usr/lib/jdk-11.0.3/bin/java TestRadians
> 1.0000003575641672
> [root@spark-3267648 ~]# /usr/lib/jdk8u222-b10/bin/java TestRadians
> 1.000000357564167
> {code}
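> PR 25417 resolves this in Spark itself; purely as an illustration (not the change made in that PR), a floating-point comparison that tolerates ulp-level differences between JDK versions could look like the sketch below. {{approxEquals}} is a hypothetical helper, not an existing Spark or JDK API.
> {code:java}
> public class ApproxCompare {
>   // Hypothetical helper: treat two doubles as equal if they are at most `ulps` ulps apart.
>   static boolean approxEquals(double expected, double actual, int ulps) {
>     return Math.abs(actual - expected) <= ulps * Math.ulp(expected);
>   }
>
>   public static void main(String[] args) {
>     double jdk8  = 1.000000357564167;   // result from the JDK 8 run above
>     double jdk11 = 1.0000003575641672;  // result from the JDK 11 run above
>     System.out.println(approxEquals(jdk8, jdk11, 1)); // true: within one ulp
>     System.out.println(jdk8 == jdk11);                // false: still distinct doubles
>   }
> }
> {code}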


