Posted to commits@spark.apache.org by gu...@apache.org on 2023/03/07 08:10:18 UTC

[spark] branch branch-3.4 updated: [SPARK-42559][CONNECT][TESTS][FOLLOW-UP] Disable ANSI in several tests at DataFrameNaFunctionSuite.scala

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new 8e63ca95be9 [SPARK-42559][CONNECT][TESTS][FOLLOW-UP] Disable ANSI in several tests at DataFrameNaFunctionSuite.scala
8e63ca95be9 is described below

commit 8e63ca95be9010ecb53bae78e229a20ccbe8bbce
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Tue Mar 7 17:09:57 2023 +0900

    [SPARK-42559][CONNECT][TESTS][FOLLOW-UP] Disable ANSI in several tests at DataFrameNaFunctionSuite.scala
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to disable ANSI mode in both the `replace float with nan` and `replace double with nan` tests in `DataFrameNaFunctionSuite.scala`.
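    
    The fix wraps each failing assertion in `withSQLConf`, which scopes a SQL conf change to a block and restores the previous value afterwards. As a rough sketch of the pattern (the `spark` session and the `SQLHelper` mixin that provides `withSQLConf` are assumed here, not shown in this patch):
    
    ```scala
    import org.apache.spark.sql.internal.SQLConf
    
    // Everything inside the block runs with spark.sql.ansi.enabled=false;
    // the previous value is restored when the block exits, even on failure.
    withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
      // With ANSI off, an invalid cast returns NULL instead of raising an error.
      spark.sql("SELECT CAST('not a number' AS INT)").show()
    }
    ```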
    
    ### Why are the changes needed?
    
    To recover the build with ANSI mode on: https://github.com/apache/spark/actions/runs/4349682658
    The Spark Connect side does not fully leverage the error framework yet, so ANSI mode is simply disabled in these tests for now.
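    
    For context (an inference from the test code, not stated in the patch): `na.replace("*", ...)` casts the NaN replacement value into every matching column, including the integral ones. With ANSI off that cast silently yields 0, which is what the expected rows assert; with ANSI on it raises an error instead, so the tests fail. A quick, illustrative way to see the difference in `spark-shell`:
    
    ```scala
    // Legacy (non-ANSI) behavior: NaN cast to an integral type becomes 0.
    spark.conf.set("spark.sql.ansi.enabled", "false")
    spark.sql("SELECT CAST(CAST('NaN' AS DOUBLE) AS INT)").show()  // column value is 0
    
    // ANSI behavior: the same cast raises an error instead of returning 0.
    spark.conf.set("spark.sql.ansi.enabled", "true")
    // spark.sql("SELECT CAST(CAST('NaN' AS DOUBLE) AS INT)").show()  // throws under ANSI
    ```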
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, test-only.
    
    ### How was this patch tested?
    
    Manually ran them in the IDE with ANSI mode on.
    
    Closes #40311 from HyukjinKwon/SPARK-42559-followup.
    
    Authored-by: Hyukjin Kwon <gu...@apache.org>
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
    (cherry picked from commit c3a09e283a63587d975aaf7dc62fdaae50c4056e)
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
 .../spark/sql/DataFrameNaFunctionSuite.scala       | 23 +++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala
index 5049147678b..1f6ea879248 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala
@@ -20,9 +20,10 @@ package org.apache.spark.sql
 import scala.collection.JavaConverters._
 
 import org.apache.spark.sql.connect.client.util.QueryTest
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{StringType, StructType}
 
-class DataFrameNaFunctionSuite extends QueryTest {
+class DataFrameNaFunctionSuite extends QueryTest with SQLHelper {
   private def createDF(): DataFrame = {
     val sparkSession = spark
     import sparkSession.implicits._
@@ -386,17 +387,21 @@ class DataFrameNaFunctionSuite extends QueryTest {
   }
 
   test("replace float with nan") {
-    checkAnswer(
-      createNaNDF().na.replace("*", Map(1.0f -> Float.NaN)),
-      Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) ::
-        Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) :: Nil)
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> false.toString) {
+      checkAnswer(
+        createNaNDF().na.replace("*", Map(1.0f -> Float.NaN)),
+        Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) ::
+          Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) :: Nil)
+    }
   }
 
   test("replace double with nan") {
-    checkAnswer(
-      createNaNDF().na.replace("*", Map(1.0 -> Double.NaN)),
-      Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) ::
-        Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) :: Nil)
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> false.toString) {
+      checkAnswer(
+        createNaNDF().na.replace("*", Map(1.0 -> Double.NaN)),
+        Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) ::
+          Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) :: Nil)
+    }
   }
 
 }

