You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2023/06/21 00:22:23 UTC
[spark] branch master updated: [SPARK-44122][CONNECT][TESTS] Make `connect` module pass except Arrow-related ones in Java 21
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new cafbea5b136 [SPARK-44122][CONNECT][TESTS] Make `connect` module pass except Arrow-related ones in Java 21
cafbea5b136 is described below
commit cafbea5b13623276517a9d716f75745eff91f616
Author: Dongjoon Hyun <do...@apache.org>
AuthorDate: Tue Jun 20 17:22:14 2023 -0700
[SPARK-44122][CONNECT][TESTS] Make `connect` module pass except Arrow-related ones in Java 21
### What changes were proposed in this pull request?
This PR aims to make the `connect` module pass, except `Arrow-based` tests, in a Java 21 environment. In addition, the following JIRA is created to enable them.
- SPARK-44121 Re-enable Arrow-based connect tests in Java 21
### Why are the changes needed?
Although `Arrow` is crucial in `connect` module, this PR identifies those tests and helps us monitor newly added ones in the future because they will cause a new failure.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass the CIs and manual tests.
```
$ java -version
openjdk version "21-ea" 2023-09-19
OpenJDK Runtime Environment (build 21-ea+27-2343)
OpenJDK 64-Bit Server VM (build 21-ea+27-2343, mixed mode, sharing)
```
**BEFORE**
```
$ build/sbt "connect/test"
...
[info] *** 9 TESTS FAILED ***
[error] Failed tests:
[error] org.apache.spark.sql.connect.planner.SparkConnectProtoSuite
[error] org.apache.spark.sql.connect.planner.SparkConnectPlannerSuite
[error] org.apache.spark.sql.connect.planner.SparkConnectServiceSuite
[error] (connect / Test / test) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 67 s (01:07), completed Jun 20, 2023, 2:42:10 PM
```
**AFTER**
```
$ build/sbt "connect/test"
...
[info] Tests: succeeded 742, failed 0, canceled 10, ignored 0, pending 0
[info] All tests passed.
[success] Total time: 66 s (01:06), completed Jun 20, 2023, 2:40:35 PM
```
Closes #41679 from dongjoon-hyun/SPARK-44122.
Authored-by: Dongjoon Hyun <do...@apache.org>
Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
.../spark/sql/connect/planner/SparkConnectPlannerSuite.scala | 7 +++++++
.../spark/sql/connect/planner/SparkConnectProtoSuite.scala | 11 +++++++++++
.../spark/sql/connect/planner/SparkConnectServiceSuite.scala | 5 +++++
3 files changed, 23 insertions(+)
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala
index ab01f2a6c14..14fdc8c0073 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala
@@ -21,6 +21,7 @@ import scala.collection.JavaConverters._
import com.google.protobuf.ByteString
import io.grpc.stub.StreamObserver
+import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.spark.SparkFunSuite
import org.apache.spark.connect.proto
@@ -439,6 +440,8 @@ class SparkConnectPlannerSuite extends SparkFunSuite with SparkConnectPlanTest {
}
test("transform LocalRelation") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
val rows = (0 until 10).map { i =>
InternalRow(i, UTF8String.fromString(s"str-$i"), InternalRow(i))
}
@@ -540,6 +543,8 @@ class SparkConnectPlannerSuite extends SparkFunSuite with SparkConnectPlanTest {
}
test("transform UnresolvedStar and ExpressionString") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
val sql =
"SELECT * FROM VALUES (1,'spark',1), (2,'hadoop',2), (3,'kafka',3) AS tab(id, name, value)"
val input = proto.Relation
@@ -576,6 +581,8 @@ class SparkConnectPlannerSuite extends SparkFunSuite with SparkConnectPlanTest {
}
test("transform UnresolvedStar with target field") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
val rows = (0 until 10).map { i =>
InternalRow(InternalRow(InternalRow(i, i + 1)))
}
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
index 8cb5c1a2919..181564a3b60 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
@@ -21,6 +21,7 @@ import java.nio.file.{Files, Paths}
import scala.collection.JavaConverters._
import com.google.protobuf.ByteString
+import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.spark.{SparkClassNotFoundException, SparkIllegalArgumentException}
import org.apache.spark.connect.proto
@@ -693,6 +694,8 @@ class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
}
test("WriteTo with create") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
withTable("testcat.table_name") {
spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryTableCatalog].getName)
@@ -720,6 +723,8 @@ class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
}
test("WriteTo with create and using") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
val defaultOwnership = Map(TableCatalog.PROP_OWNER -> Utils.getCurrentUserName())
withTable("testcat.table_name") {
spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryTableCatalog].getName)
@@ -757,6 +762,8 @@ class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
}
test("WriteTo with append") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
withTable("testcat.table_name") {
spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryTableCatalog].getName)
@@ -788,6 +795,8 @@ class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
}
test("WriteTo with overwrite") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
withTable("testcat.table_name") {
spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryTableCatalog].getName)
@@ -841,6 +850,8 @@ class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
}
test("WriteTo with overwritePartitions") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
withTable("testcat.table_name") {
spark.conf.set("spark.sql.catalog.testcat", classOf[InMemoryTableCatalog].getName)
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala
index ed47e8a647c..bceaada9051 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala
@@ -24,6 +24,7 @@ import io.grpc.stub.StreamObserver
import org.apache.arrow.memory.RootAllocator
import org.apache.arrow.vector.{BigIntVector, Float8Vector}
import org.apache.arrow.vector.ipc.ArrowStreamReader
+import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.spark.connect.proto
import org.apache.spark.sql.connect.dsl.MockRemoteSession
@@ -130,6 +131,8 @@ class SparkConnectServiceSuite extends SharedSparkSession {
}
test("SPARK-41224: collect data using arrow") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
val instance = new SparkConnectService(false)
val connect = new MockRemoteSession()
val context = proto.UserContext
@@ -304,6 +307,8 @@ class SparkConnectServiceSuite extends SharedSparkSession {
}
test("Test observe response") {
+ // TODO(SPARK-44121) Renable Arrow-based connect tests in Java 21
+ assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17))
withTable("test") {
spark.sql("""
| CREATE TABLE test (col1 INT, col2 STRING)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org