You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2023/06/18 20:50:47 UTC
[spark] branch master updated: [SPARK-44034][TESTS] Add a new test group for sql module
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 1379680be8d [SPARK-44034][TESTS] Add a new test group for sql module
1379680be8d is described below
commit 1379680be8d54be466c5b80b0fc578126206e77e
Author: yangjie01 <ya...@baidu.com>
AuthorDate: Sun Jun 18 13:50:36 2023 -0700
[SPARK-44034][TESTS] Add a new test group for sql module
### What changes were proposed in this pull request?
The purpose of this PR is to add a new test tag `SlowSQLTest` to the sql module, identify some Suites whose test cases take more than 3 seconds, and apply the tag in the GA testing task to reduce the testing pressure of the `sql others` group.
For branches-3.3 and branches-3.4, a tag that will not appear in the sql module was assigned to the new test group to avoid `java.lang.ClassNotFoundException` and make this group build only without running any tests.
### Why are the changes needed?
For a long time, the sql module UTs have had only two groups: `slow` and `others`. The test cases in group `slow` are fixed, while the number of test cases in group `others` continues to increase, which has had a certain impact on the testing duration and stability of group `others`.
So this PR proposes to add a new testing group to share the testing pressure of the `sql others` group; this makes the testing time of the three groups more even, and it will hopefully improve the stability of the GA task.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Should monitor GA
Closes #41638 from LuciferYang/SPARK-44034-2.
Authored-by: yangjie01 <ya...@baidu.com>
Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
.github/workflows/build_and_test.yml | 17 ++++++++++++++++-
.../java/org/apache/spark/tags/SlowSQLTest.java | 21 ++++++++++-----------
.../spark/sql/ApproximatePercentileQuerySuite.scala | 2 ++
.../org/apache/spark/sql/CachedTableSuite.scala | 2 ++
.../apache/spark/sql/DataFrameAsOfJoinSuite.scala | 2 ++
.../scala/org/apache/spark/sql/DataFrameSuite.scala | 2 ++
.../spark/sql/DataFrameWindowFunctionsSuite.scala | 2 ++
.../org/apache/spark/sql/DatasetCacheSuite.scala | 3 ++-
.../test/scala/org/apache/spark/sql/JoinSuite.scala | 2 ++
.../WriteDistributionAndOrderingSuite.scala | 2 ++
.../sql/execution/BroadcastExchangeSuite.scala | 1 +
.../execution/OptimizeMetadataOnlyQuerySuite.scala | 2 ++
.../spark/sql/execution/QueryExecutionSuite.scala | 3 ++-
.../execution/adaptive/AdaptiveQueryExecSuite.scala | 2 ++
.../FileSourceAggregatePushDownSuite.scala | 5 +++++
.../execution/datasources/V1WriteCommandSuite.scala | 2 ++
.../parquet/ParquetRebaseDatetimeSuite.scala | 3 +++
.../datasources/parquet/ParquetRowIndexSuite.scala | 2 ++
.../execution/streaming/state/RocksDBSuite.scala | 2 ++
.../sql/execution/ui/AllExecutionsPageSuite.scala | 2 ++
.../spark/sql/expressions/ExpressionInfoSuite.scala | 2 ++
.../spark/sql/sources/BucketedReadSuite.scala | 2 ++
.../spark/sql/sources/BucketedWriteSuite.scala | 2 ++
.../DisableUnnecessaryBucketedScanSuite.scala | 3 +++
.../streaming/AcceptsLatestSeenOffsetSuite.scala | 2 ++
.../DeprecatedStreamingAggregationSuite.scala | 2 ++
.../sql/streaming/EventTimeWatermarkSuite.scala | 2 ++
.../spark/sql/streaming/FileStreamSinkSuite.scala | 3 +++
.../spark/sql/streaming/FileStreamSourceSuite.scala | 5 ++++-
.../spark/sql/streaming/FileStreamStressSuite.scala | 2 ++
...apGroupsInPandasWithStateDistributionSuite.scala | 2 ++
.../FlatMapGroupsInPandasWithStateSuite.scala | 2 ++
.../FlatMapGroupsWithStateDistributionSuite.scala | 2 ++
.../sql/streaming/FlatMapGroupsWithStateSuite.scala | 3 +++
...latMapGroupsWithStateWithInitialStateSuite.scala | 2 ++
.../sql/streaming/MemorySourceStressSuite.scala | 2 ++
.../sql/streaming/MultiStatefulOperatorsSuite.scala | 2 ++
.../spark/sql/streaming/RocksDBStateStoreTest.scala | 2 ++
.../apache/spark/sql/streaming/StreamSuite.scala | 2 ++
.../sql/streaming/StreamingAggregationSuite.scala | 3 +++
.../sql/streaming/StreamingDeduplicationSuite.scala | 3 +++
...StreamingDeduplicationWithinWatermarkSuite.scala | 2 ++
.../spark/sql/streaming/StreamingJoinSuite.scala | 5 +++++
.../sql/streaming/StreamingQueryListenerSuite.scala | 2 ++
.../sql/streaming/StreamingQueryManagerSuite.scala | 2 ++
.../spark/sql/streaming/StreamingQuerySuite.scala | 2 ++
.../StreamingSessionWindowDistributionSuite.scala | 2 ++
.../sql/streaming/StreamingSessionWindowSuite.scala | 2 ++
...treamingStateStoreFormatCompatibilitySuite.scala | 2 ++
.../sql/streaming/TriggerAvailableNowSuite.scala | 2 ++
.../ContinuousQueryStatusAndProgressSuite.scala | 2 ++
.../sql/streaming/continuous/ContinuousSuite.scala | 4 ++++
.../sources/StreamingDataSourceV2Suite.scala | 3 +++
.../test/DataStreamReaderWriterSuite.scala | 2 ++
.../streaming/test/DataStreamTableAPISuite.scala | 2 ++
.../sql/streaming/ui/StreamingQueryPageSuite.scala | 2 ++
.../spark/sql/streaming/ui/UISeleniumSuite.scala | 3 +++
57 files changed, 153 insertions(+), 15 deletions(-)
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index a373b0e76e7..93d69f18740 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -175,12 +175,27 @@ jobs:
hadoop: ${{ inputs.hadoop }}
hive: hive2.3
included-tags: org.apache.spark.tags.ExtendedSQLTest
+ comment: "- extended tests"
+ - modules: sql
+ java: ${{ inputs.java }}
+ hadoop: ${{ inputs.hadoop }}
+ hive: hive2.3
+ # Using a tag that will not appear in sql module for placeholder, branch-3.3 and branch-3.4 will not run any UTs.
+ included-tags: >-
+ ${{
+ ((inputs.branch == 'branch-3.3' || inputs.branch == 'branch-3.4') && 'org.apache.spark.tags.SlowHiveTest')
+ || 'org.apache.spark.tags.SlowSQLTest'
+ }}
comment: "- slow tests"
- modules: sql
java: ${{ inputs.java }}
hadoop: ${{ inputs.hadoop }}
hive: hive2.3
- excluded-tags: org.apache.spark.tags.ExtendedSQLTest
+ excluded-tags: >-
+ ${{
+ ((inputs.branch == 'branch-3.3' || inputs.branch == 'branch-3.4') && 'org.apache.spark.tags.ExtendedSQLTest')
+ || 'org.apache.spark.tags.ExtendedSQLTest,org.apache.spark.tags.SlowSQLTest'
+ }}
comment: "- other tests"
env:
MODULES_TO_TEST: ${{ matrix.modules }}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala b/common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java
similarity index 69%
copy from sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
copy to common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java
index 7f2972edea7..63211aa23b3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
+++ b/common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java
@@ -15,17 +15,16 @@
* limitations under the License.
*/
-package org.apache.spark.sql.streaming
+package org.apache.spark.tags;
-import org.apache.spark.sql.execution.streaming._
+import org.scalatest.TagAnnotation;
-class MemorySourceStressSuite extends StreamTest {
- import testImplicits._
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
- test("memory stress test") {
- val input = MemoryStream[Int]
- val mapped = input.toDS().map(_ + 1)
-
- runStressTest(mapped, AddData(input, _: _*))
- }
-}
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface SlowSQLTest { }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala
index 9237c9e9486..18e8dd6249b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala
@@ -25,10 +25,12 @@ import org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile
import org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile.PercentileDigest
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
/**
* End-to-end tests for approximate percentile aggregate function.
*/
+@SlowSQLTest
class ApproximatePercentileQuerySuite extends QueryTest with SharedSparkSession {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 1f2235a10a9..8ad0cd6b7eb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -45,11 +45,13 @@ import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.storage.{RDDBlockId, StorageLevel}
import org.apache.spark.storage.StorageLevel.{MEMORY_AND_DISK_2, MEMORY_ONLY}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.unsafe.types.CalendarInterval
import org.apache.spark.util.{AccumulatorContext, Utils}
private case class BigData(s: String)
+@SlowSQLTest
class CachedTableSuite extends QueryTest with SQLTestUtils
with SharedSparkSession
with AdaptiveSparkPlanHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala
index 2f8418f96c9..c4a1eaf5af4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala
@@ -23,7 +23,9 @@ import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class DataFrameAsOfJoinSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 1dc0254a0f9..c94c6d7e50e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -50,10 +50,12 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT, SharedSparkSession}
import org.apache.spark.sql.test.SQLTestData.{ArrayStringWrapper, ContainerStringWrapper, DecimalData, StringWrapper, TestData2}
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.unsafe.types.CalendarInterval
import org.apache.spark.util.Utils
import org.apache.spark.util.random.XORShiftRandom
+@SlowSQLTest
class DataFrameSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
index 1ee9fd6a6a8..fe4a5cebc5d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
@@ -32,10 +32,12 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
/**
* Window function testing for DataFrame API.
*/
+@SlowSQLTest
class DataFrameWindowFunctionsSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
index 2f4098d7cc7..6033b9fee84 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
@@ -25,8 +25,9 @@ import org.apache.spark.sql.execution.columnar.{InMemoryRelation, InMemoryTableS
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.storage.StorageLevel
+import org.apache.spark.tags.SlowSQLTest
-
+@SlowSQLTest
class DatasetCacheSuite extends QueryTest
with SharedSparkSession
with TimeLimits
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index 60689f96700..4d0fd2e6513 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -38,7 +38,9 @@ import org.apache.spark.sql.execution.python.BatchEvalPythonExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.StructType
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class JoinSuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala
index 341b53f032a..881e077514f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala
@@ -40,7 +40,9 @@ import org.apache.spark.sql.streaming.{StreamingQueryException, Trigger}
import org.apache.spark.sql.test.SQLTestData.TestData
import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType}
import org.apache.spark.sql.util.QueryExecutionListener
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class WriteDistributionAndOrderingSuite extends DistributionAndOrderingSuiteBase {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala
index 9061764f759..31a8507cba0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala
@@ -100,6 +100,7 @@ class BroadcastExchangeSuite extends SparkPlanTest
}
// Additional tests run in 'local-cluster' mode.
+@ExtendedSQLTest
class BroadcastExchangeExecSparkSuite
extends SparkFunSuite with LocalSparkContext with AdaptiveSparkPlanHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala
index 68691e2f7fd..cbc565974cd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala
@@ -24,7 +24,9 @@ import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.OPTIMIZER_METADATA_ONLY
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class OptimizeMetadataOnlyQuerySuite extends QueryTest with SharedSparkSession {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
index d2a101b2395..0a22efcb34d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
@@ -220,7 +220,8 @@ class QueryExecutionSuite extends SharedSparkSession {
assertNoTag(tag5, df.queryExecution.sparkPlan)
}
- test("Logging plan changes for execution") {
+ // TODO(SPARK-44074): re-enable this test after SPARK-44074 resolved
+ ignore("Logging plan changes for execution") {
val testAppender = new LogAppender("plan changes")
withLogAppender(testAppender) {
withSQLConf(SQLConf.PLAN_CHANGE_LOG_LEVEL.key -> "INFO") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
index 79f2b6b4657..68bae34790a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
@@ -46,8 +46,10 @@ import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.test.SQLTestData.TestData
import org.apache.spark.sql.types.{IntegerType, StructType}
import org.apache.spark.sql.util.QueryExecutionListener
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class AdaptiveQueryExecSuite
extends QueryTest
with SharedSparkSession
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala
index e8fae210fa4..317abd57a94 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala
@@ -28,6 +28,7 @@ import org.apache.spark.sql.functions.min
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{BinaryType, BooleanType, ByteType, DateType, Decimal, DecimalType, DoubleType, FloatType, IntegerType, LongType, ShortType, StringType, StructField, StructType, TimestampType}
+import org.apache.spark.tags.SlowSQLTest
/**
* A test suite that tests aggregate push down for Parquet and ORC.
@@ -543,12 +544,14 @@ abstract class ParquetAggregatePushDownSuite
SQLConf.PARQUET_AGGREGATE_PUSHDOWN_ENABLED.key
}
+@SlowSQLTest
class ParquetV1AggregatePushDownSuite extends ParquetAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
}
+@SlowSQLTest
class ParquetV2AggregatePushDownSuite extends ParquetAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
@@ -562,12 +565,14 @@ abstract class OrcAggregatePushDownSuite extends OrcTest with FileSourceAggregat
SQLConf.ORC_AGGREGATE_PUSHDOWN_ENABLED.key
}
+@SlowSQLTest
class OrcV1AggregatePushDownSuite extends OrcAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "orc")
}
+@SlowSQLTest
class OrcV2AggregatePushDownSuite extends OrcAggregatePushDownSuite {
override protected def sparkConf: SparkConf =
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala
index 20a90cd94b6..ce43edb79c1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
import org.apache.spark.sql.types.{IntegerType, StringType}
import org.apache.spark.sql.util.QueryExecutionListener
+import org.apache.spark.tags.SlowSQLTest
trait V1WriteCommandSuiteBase extends SQLTestUtils {
@@ -105,6 +106,7 @@ trait V1WriteCommandSuiteBase extends SQLTestUtils {
}
}
+@SlowSQLTest
class V1WriteCommandSuite extends QueryTest with SharedSparkSession with V1WriteCommandSuiteBase {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala
index 85c42cf7db8..240bb4e6dcb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala
@@ -28,6 +28,7 @@ import org.apache.spark.sql.internal.SQLConf.{LegacyBehaviorPolicy, ParquetOutpu
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.{CORRECTED, EXCEPTION, LEGACY}
import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType.{INT96, TIMESTAMP_MICROS, TIMESTAMP_MILLIS}
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
abstract class ParquetRebaseDatetimeSuite
extends QueryTest
@@ -461,6 +462,7 @@ abstract class ParquetRebaseDatetimeSuite
}
}
+@SlowSQLTest
class ParquetRebaseDatetimeV1Suite extends ParquetRebaseDatetimeSuite {
override protected def sparkConf: SparkConf =
super
@@ -468,6 +470,7 @@ class ParquetRebaseDatetimeV1Suite extends ParquetRebaseDatetimeSuite {
.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
}
+@SlowSQLTest
class ParquetRebaseDatetimeV2Suite extends ParquetRebaseDatetimeSuite {
override protected def sparkConf: SparkConf =
super
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala
index d9ba3456825..dd350ffd315 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala
@@ -34,7 +34,9 @@ import org.apache.spark.sql.functions.{col, max, min}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{LongType, StringType}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class ParquetRowIndexSuite extends QueryTest with SharedSparkSession {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
index 3023a445930..e31b05c362f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.sql.execution.streaming.CreateAtomicTestManager
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.{ThreadUtils, Utils}
trait RocksDBStateStoreChangelogCheckpointingTestUtil {
@@ -94,6 +95,7 @@ trait AlsoTestWithChangelogCheckpointingEnabled
}
}
+@SlowSQLTest
class RocksDBSuite extends AlsoTestWithChangelogCheckpointingEnabled with SharedSparkSession {
sqlConf.setConf(SQLConf.STATE_STORE_PROVIDER_CLASS, classOf[RocksDBStateStoreProvider].getName)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala
index d1cd32f3621..fe5e99d3f3f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala
@@ -34,6 +34,7 @@ import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.{SparkPlanInfo, SQLExecution}
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.status.{AppStatusStore, ElementTrackingStore}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
import org.apache.spark.util.kvstore.InMemoryStore
@@ -271,6 +272,7 @@ class AllExecutionsPageWithInMemoryStoreSuite extends AllExecutionsPageSuite {
}
}
+@SlowSQLTest
class AllExecutionsPageWithRocksDBBackendSuite extends AllExecutionsPageSuite {
private val storePath = Utils.createTempDir()
override protected def createStatusStore(): SQLAppStatusStore = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala
index ad3a8844626..4dd93983e87 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala
@@ -25,8 +25,10 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution.HiveResult.hiveResultString
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class ExpressionInfoSuite extends SparkFunSuite with SharedSparkSession {
test("Replace _FUNC_ in ExpressionInfo") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
index 0a3824e0298..776bcb3211f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
@@ -33,8 +33,10 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.collection.BitSet
+@SlowSQLTest
class BucketedReadWithoutHiveSupportSuite
extends BucketedReadSuite with SharedSparkSession {
protected override def beforeAll(): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
index 65f40cce18e..4f1b7d363a1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
@@ -29,7 +29,9 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class BucketedWriteWithoutHiveSupportSuite extends BucketedWriteSuite with SharedSparkSession {
protected override def beforeAll(): Unit = {
super.beforeAll()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala
index 737cffc42f9..1f55742cd67 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala
@@ -26,7 +26,9 @@ import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class DisableUnnecessaryBucketedScanWithoutHiveSupportSuite
extends DisableUnnecessaryBucketedScanSuite
with SharedSparkSession
@@ -38,6 +40,7 @@ class DisableUnnecessaryBucketedScanWithoutHiveSupportSuite
}
}
+@SlowSQLTest
class DisableUnnecessaryBucketedScanWithoutHiveSupportSuiteAE
extends DisableUnnecessaryBucketedScanSuite
with SharedSparkSession
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala
index d3e9a08509b..b0c585ffda5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala
@@ -26,7 +26,9 @@ import org.apache.spark.sql.connector.read.streaming.{AcceptsLatestSeenOffset, S
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.execution.streaming.sources.{ContinuousMemoryStream, ContinuousMemoryStreamOffset}
import org.apache.spark.sql.types.{LongType, StructType}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class AcceptsLatestSeenOffsetSuite extends StreamTest with BeforeAndAfter {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala
index 99f7e32d4df..7777887ec62 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala
@@ -24,7 +24,9 @@ import org.apache.spark.sql.execution.streaming.state.StreamingAggregationStateM
import org.apache.spark.sql.expressions.scalalang.typed
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode._
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
@deprecated("This test suite will be removed.", "3.0.0")
class DeprecatedStreamingAggregationSuite extends StateStoreMetricsTest with Assertions {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala
index ca8ad7f88bc..0b076e05957 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala
@@ -39,8 +39,10 @@ import org.apache.spark.sql.execution.streaming.sources.MemorySink
import org.apache.spark.sql.functions.{count, expr, timestamp_seconds, window}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class EventTimeWatermarkSuite extends StreamTest with BeforeAndAfter with Matchers with Logging {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
index 8c31d3c7abf..75f440caefc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
@@ -41,6 +41,7 @@ import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
abstract class FileStreamSinkSuite extends StreamTest {
@@ -673,6 +674,7 @@ class PendingCommitFilesTrackingManifestFileCommitProtocol(jobId: String, path:
}
}
+@SlowSQLTest
class FileStreamSinkV1Suite extends FileStreamSinkSuite {
override protected def sparkConf: SparkConf =
super
@@ -723,6 +725,7 @@ class FileStreamSinkV1Suite extends FileStreamSinkSuite {
}
}
+@SlowSQLTest
class FileStreamSinkV2Suite extends FileStreamSinkSuite {
override protected def sparkConf: SparkConf =
super
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
index 58ccf80d901..84cf20ede25 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
@@ -44,7 +44,8 @@ import org.apache.spark.sql.execution.streaming.sources.MemorySink
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.StreamManualClock
import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.sql.types.{StructType, _}
+import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
abstract class FileStreamSourceTest
@@ -226,6 +227,7 @@ abstract class FileStreamSourceTest
val valueSchema = new StructType().add("value", StringType)
}
+@SlowSQLTest
class FileStreamSourceSuite extends FileStreamSourceTest {
import testImplicits._
@@ -2362,6 +2364,7 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
}
}
+@SlowSQLTest
class FileStreamSourceStressTestSuite extends FileStreamSourceTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala
index 28412ea07a7..a27a04283da 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala
@@ -24,6 +24,7 @@ import scala.util.Random
import scala.util.control.NonFatal
import org.apache.spark.sql.catalyst.util._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
/**
@@ -36,6 +37,7 @@ import org.apache.spark.util.Utils
*
* At the end, the resulting files are loaded and the answer is checked.
*/
+@SlowSQLTest
class FileStreamStressSuite extends StreamTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala
index 9c6573fd782..1eae0fe9ef0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala
@@ -24,7 +24,9 @@ import org.apache.spark.sql.execution.python.FlatMapGroupsInPandasWithStateExec
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.util.{StatefulOpClusteredDistributionTestHelper, StreamManualClock}
import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructField, StructType}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class FlatMapGroupsInPandasWithStateDistributionSuite extends StreamTest
with StatefulOpClusteredDistributionTestHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala
index ca738b805eb..20fb17fe6ec 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala
@@ -28,7 +28,9 @@ import org.apache.spark.sql.functions.{lit, timestamp_seconds}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.StreamManualClock
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class FlatMapGroupsInPandasWithStateSuite extends StateStoreMetricsTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala
index f1578ae5df9..b597a244710 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala
@@ -26,8 +26,10 @@ import org.apache.spark.sql.execution.streaming.{FlatMapGroupsWithStateExec, Mem
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.GroupStateTimeout.ProcessingTimeTimeout
import org.apache.spark.sql.streaming.util.{StatefulOpClusteredDistributionTestHelper, StreamManualClock}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class FlatMapGroupsWithStateDistributionSuite extends StreamTest
with StatefulOpClusteredDistributionTestHelper {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala
index 9be699a17d2..a3774bf17e6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala
@@ -38,6 +38,7 @@ import org.apache.spark.sql.functions.timestamp_seconds
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.StreamManualClock
import org.apache.spark.sql.types.{DataType, IntegerType}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
/** Class to check custom state types */
@@ -45,6 +46,7 @@ case class RunningCount(count: Long)
case class Result(key: Long, count: Int)
+@SlowSQLTest
class FlatMapGroupsWithStateSuite extends StateStoreMetricsTest {
import testImplicits._
@@ -1102,5 +1104,6 @@ object FlatMapGroupsWithStateSuite {
}
}
+@SlowSQLTest
class RocksDBStateStoreFlatMapGroupsWithStateSuite
extends FlatMapGroupsWithStateSuite with RocksDBStateStoreTest
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala
index beee07b9fbc..2a2a83d35e1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala
@@ -26,7 +26,9 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.{assertCanGetProcessingTime, assertCannotGetWatermark}
import org.apache.spark.sql.streaming.GroupStateTimeout.{EventTimeTimeout, NoTimeout, ProcessingTimeTimeout}
import org.apache.spark.sql.streaming.util.StreamManualClock
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class FlatMapGroupsWithStateWithInitialStateSuite extends StateStoreMetricsTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
index 7f2972edea7..65ac6712ab4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala
@@ -18,7 +18,9 @@
package org.apache.spark.sql.streaming
import org.apache.spark.sql.execution.streaming._
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class MemorySourceStressSuite extends StreamTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala
index 9f00aa2e6ee..fb5445ae436 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala
@@ -26,8 +26,10 @@ import org.apache.spark.sql.execution.streaming.{MemoryStream, StateStoreSaveExe
import org.apache.spark.sql.execution.streaming.state.StateStore
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.tags.SlowSQLTest
// Tests for the multiple stateful operators support.
+@SlowSQLTest
class MultiStatefulOperatorsSuite
extends StreamTest with StateStoreMetricsTest with BeforeAndAfter {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/RocksDBStateStoreTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/RocksDBStateStoreTest.scala
index 4c73dd328b8..c466183f467 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/RocksDBStateStoreTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/RocksDBStateStoreTest.scala
@@ -23,7 +23,9 @@ import org.scalatest.Tag
import org.apache.spark.sql.execution.streaming.state.{RocksDBConf, RocksDBStateStoreProvider}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SQLTestUtils
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
trait RocksDBStateStoreTest extends SQLTestUtils {
val rocksdbChangelogCheckpointingConfKey: String = RocksDBConf.ROCKSDB_SQL_CONF_NAME_PREFIX +
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala
index 8c6402359bc..6fd63454e82 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala
@@ -47,8 +47,10 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.StreamSourceProvider
import org.apache.spark.sql.streaming.util.{BlockOnStopSourceProvider, StreamManualClock}
import org.apache.spark.sql.types.{IntegerType, LongType, StructField, StructType}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamSuite extends StreamTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala
index 4ea59fe7405..03780478b33 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala
@@ -43,12 +43,14 @@ import org.apache.spark.sql.streaming.OutputMode._
import org.apache.spark.sql.streaming.util.{MockSourceProvider, StreamManualClock}
import org.apache.spark.sql.types.{StructType, TimestampType}
import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
object FailureSingleton {
var firstTime = true
}
+@SlowSQLTest
class StreamingAggregationSuite extends StateStoreMetricsTest with Assertions {
import testImplicits._
@@ -957,5 +959,6 @@ class StreamingAggregationSuite extends StateStoreMetricsTest with Assertions {
}
}
+@SlowSQLTest
class RocksDBStateStoreStreamingAggregationSuite
extends StreamingAggregationSuite with RocksDBStateStoreTest
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
index 4c2a889c68d..c69088589cc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala
@@ -26,8 +26,10 @@ import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamingDeduplicationSuite extends StateStoreMetricsTest {
import testImplicits._
@@ -484,5 +486,6 @@ class StreamingDeduplicationSuite extends StateStoreMetricsTest {
}
}
+@SlowSQLTest
class RocksDBStateStoreStreamingDeduplicationSuite
extends StreamingDeduplicationSuite with RocksDBStateStoreTest
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationWithinWatermarkSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationWithinWatermarkSuite.scala
index c1435182c15..595fc1cb9ce 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationWithinWatermarkSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationWithinWatermarkSuite.scala
@@ -21,7 +21,9 @@ import org.apache.spark.sql.{AnalysisException, Dataset, SaveMode}
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes.Append
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.functions.timestamp_seconds
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class StreamingDeduplicationWithinWatermarkSuite extends StateStoreMetricsTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
index c22e4459660..3e1bc57dfa2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala
@@ -36,6 +36,7 @@ import org.apache.spark.sql.execution.streaming.{MemoryStream, StatefulOperatorS
import org.apache.spark.sql.execution.streaming.state.{RocksDBStateStoreProvider, StateStore, StateStoreProviderId}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
abstract class StreamingJoinSuite
@@ -223,6 +224,7 @@ abstract class StreamingJoinSuite
}
}
+@SlowSQLTest
class StreamingInnerJoinSuite extends StreamingJoinSuite {
import testImplicits._
@@ -776,6 +778,7 @@ class StreamingInnerJoinSuite extends StreamingJoinSuite {
}
+@SlowSQLTest
class StreamingOuterJoinSuite extends StreamingJoinSuite {
import testImplicits._
@@ -1416,6 +1419,7 @@ class StreamingOuterJoinSuite extends StreamingJoinSuite {
}
}
+@SlowSQLTest
class StreamingFullOuterJoinSuite extends StreamingJoinSuite {
test("windowed full outer join") {
@@ -1619,6 +1623,7 @@ class StreamingFullOuterJoinSuite extends StreamingJoinSuite {
}
}
+@SlowSQLTest
class StreamingLeftSemiJoinSuite extends StreamingJoinSuite {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala
index 6826d161d40..5b5e8732e0d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala
@@ -35,8 +35,10 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.StreamingQueryListener._
import org.apache.spark.sql.streaming.ui.StreamingQueryStatusListener
import org.apache.spark.sql.streaming.util.StreamManualClock
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.JsonProtocol
+@SlowSQLTest
class StreamingQueryListenerSuite extends StreamTest with BeforeAndAfter {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala
index cc66ce85673..7deb0c69d12 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala
@@ -34,8 +34,10 @@ import org.apache.spark.sql.execution.datasources.v2.StreamingDataSourceV2Relati
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.BlockingSource
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamingQueryManagerSuite extends StreamTest {
import AwaitTerminationTester._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
index 5c162835b12..b889ac18974 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
@@ -50,7 +50,9 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.{BlockingSource, MockSourceProvider, StreamManualClock}
import org.apache.spark.sql.types.StructType
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class StreamingQuerySuite extends StreamTest with BeforeAndAfter with Logging with MockitoSugar {
import AwaitTerminationTester._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala
index c252dc8f045..36c7459ce82 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala
@@ -28,8 +28,10 @@ import org.apache.spark.sql.execution.streaming.{MemoryStream, SessionWindowStat
import org.apache.spark.sql.functions.{count, session_window}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.StatefulOpClusteredDistributionTestHelper
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class StreamingSessionWindowDistributionSuite extends StreamTest
with StatefulOpClusteredDistributionTestHelper with Logging {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala
index 25b7506178d..12a4b7cf37a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala
@@ -29,7 +29,9 @@ import org.apache.spark.sql.execution.streaming.state.{HDFSBackedStateStoreProvi
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class StreamingSessionWindowSuite extends StreamTest
with BeforeAndAfter with Matchers with Logging {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala
index 1032d6c5b6f..4827d06d64d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.streaming.InternalOutputModes.Complete
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.execution.streaming.state.{InvalidUnsafeRowException, StateSchemaNotCompatible}
import org.apache.spark.sql.functions._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
/**
@@ -39,6 +40,7 @@ import org.apache.spark.util.Utils
* a new test for the issue, just like the test suite "SPARK-28067 changed the sum decimal unsafe
* row format".
*/
+@SlowSQLTest
class StreamingStateStoreFormatCompatibilitySuite extends StreamTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala
index cb4410d9da9..65deca22207 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala
@@ -26,7 +26,9 @@ import org.apache.spark.sql.connector.read.streaming
import org.apache.spark.sql.connector.read.streaming.{ReadLimit, SupportsAdmissionControl}
import org.apache.spark.sql.execution.streaming.{LongOffset, MemoryStream, Offset, SerializedOffset, Source, StreamingExecutionRelation}
import org.apache.spark.sql.types.{LongType, StructType}
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class TriggerAvailableNowSuite extends FileStreamSourceTest {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala
index 59d6ac0af52..d7aa99c30aa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala
@@ -20,7 +20,9 @@ package org.apache.spark.sql.streaming.continuous
import org.apache.spark.sql.execution.streaming.StreamExecution
import org.apache.spark.sql.execution.streaming.sources.ContinuousMemoryStream
import org.apache.spark.sql.streaming.Trigger
+import org.apache.spark.tags.SlowSQLTest
+@SlowSQLTest
class ContinuousQueryStatusAndProgressSuite extends ContinuousSuiteBase {
test("StreamingQueryStatus - ContinuousExecution isDataAvailable and isTriggerActive " +
"should be false") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
index fc6b51dce79..44de7af0b4a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf.{CONTINUOUS_STREAMING_EPOCH_BACKLOG_QUEUE_SIZE, MIN_BATCHES_TO_RETAIN}
import org.apache.spark.sql.streaming.{StreamTest, Trigger}
import org.apache.spark.sql.test.TestSparkSession
+import org.apache.spark.tags.SlowSQLTest
class ContinuousSuiteBase extends StreamTest {
// We need more than the default local[2] to be able to schedule all partitions simultaneously.
@@ -89,6 +90,7 @@ class ContinuousSuiteBase extends StreamTest {
override protected val defaultTrigger = Trigger.Continuous(100)
}
+@SlowSQLTest
class ContinuousSuite extends ContinuousSuiteBase {
import IntegratedUDFTestUtils._
import testImplicits._
@@ -297,6 +299,7 @@ class ContinuousSuite extends ContinuousSuiteBase {
}
}
+@SlowSQLTest
class ContinuousStressSuite extends ContinuousSuiteBase {
import testImplicits._
@@ -372,6 +375,7 @@ class ContinuousStressSuite extends ContinuousSuiteBase {
}
}
+@SlowSQLTest
class ContinuousMetaSuite extends ContinuousSuiteBase {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala
index 9906defa96e..558b8973da1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala
@@ -35,8 +35,10 @@ import org.apache.spark.sql.sources.{DataSourceRegister, StreamSinkProvider}
import org.apache.spark.sql.streaming.{OutputMode, StreamingQuery, StreamTest, Trigger}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class FakeDataStream extends MicroBatchStream with ContinuousStream {
override def deserializeOffset(json: String): Offset = RateStreamOffset(Map())
override def commit(end: Offset): Unit = {}
@@ -269,6 +271,7 @@ object LastWriteOptions {
}
}
+@SlowSQLTest
class StreamingDataSourceV2Suite extends StreamTest {
override def beforeAll(): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
index 73ec5d89ff0..07a9ec4fdce 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
@@ -36,6 +36,7 @@ import org.apache.spark.sql.sources.{StreamSinkProvider, StreamSourceProvider}
import org.apache.spark.sql.streaming.{OutputMode, StreamingQuery, StreamingQueryException, StreamTest}
import org.apache.spark.sql.streaming.Trigger._
import org.apache.spark.sql.types._
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
object LastOptions {
@@ -108,6 +109,7 @@ class DefaultSource extends StreamSourceProvider with StreamSinkProvider {
}
}
+@SlowSQLTest
class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter {
import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala
index 6bbf2239dbf..abe606ad9c1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala
@@ -38,8 +38,10 @@ import org.apache.spark.sql.streaming.StreamTest
import org.apache.spark.sql.streaming.sources.FakeScanBuilder
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.util.Utils
+@SlowSQLTest
class DataStreamTableAPISuite extends StreamTest with BeforeAndAfter {
import testImplicits._
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala
index 78ade6a1eef..0f390dde263 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala
@@ -29,8 +29,10 @@ import org.apache.spark.SparkConf
import org.apache.spark.sql.execution.ui.StreamingQueryStatusStore
import org.apache.spark.sql.streaming.StreamingQueryProgress
import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.ui.SparkUI
+@SlowSQLTest
class StreamingQueryPageSuite extends SharedSparkSession with BeforeAndAfter {
test("correctly display streaming query page") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala
index c3de44c3ba1..ca36528db5c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala
@@ -35,9 +35,11 @@ import org.apache.spark.sql.functions.{window => windowFn, _}
import org.apache.spark.sql.internal.SQLConf.SHUFFLE_PARTITIONS
import org.apache.spark.sql.internal.StaticSQLConf.ENABLED_STREAMING_UI_CUSTOM_METRIC_LIST
import org.apache.spark.sql.streaming.{StreamingQueryException, Trigger}
+import org.apache.spark.tags.SlowSQLTest
import org.apache.spark.ui.SparkUICssErrorHandler
import org.apache.spark.util.Utils
+@SlowSQLTest
class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers {
implicit var webDriver: WebDriver = _
@@ -179,6 +181,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers {
}
}
+@SlowSQLTest
class UISeleniumWithRocksDBBackendSuite extends UISeleniumSuite {
private val storePath = Utils.createTempDir()
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org