You are viewing a plain text version of this content; the canonical link to the original message was removed during plain-text conversion.
Posted to commits@beam.apache.org by ie...@apache.org on 2019/05/03 15:32:34 UTC

[beam] branch spark-runner_structured-streaming updated: Add Batch Validates Runner tests for Structured Streaming Runner

This is an automated email from the ASF dual-hosted git repository.

iemejia pushed a commit to branch spark-runner_structured-streaming
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/spark-runner_structured-streaming by this push:
     new cc31b85  Add Batch Validates Runner tests for Structured Streaming Runner
cc31b85 is described below

commit cc31b8570e4e3a6c65633dd5ec9ec7c0898280ba
Author: Ismaël Mejía <ie...@gmail.com>
AuthorDate: Fri May 3 17:26:26 2019 +0200

    Add Batch Validates Runner tests for Structured Streaming Runner
---
 runners/spark/build.gradle | 49 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 49 insertions(+)

diff --git a/runners/spark/build.gradle b/runners/spark/build.gradle
index 3414a34..8ea30ec 100644
--- a/runners/spark/build.gradle
+++ b/runners/spark/build.gradle
@@ -158,11 +158,60 @@ task validatesRunnerStreaming(type: Test) {
   }
 }
 
+task validatesStructuredStreamingRunnerBatch(type: Test) {
+  group = "Verification"
+  def pipelineOptions = JsonOutput.toJson([
+          "--runner=SparkStructuredStreamingRunner",
+          "--streaming=false",
+  ])
+  systemProperty "beamTestPipelineOptions", pipelineOptions
+  systemProperty "beam.spark.test.reuseSparkContext", "true"
+  systemProperty "spark.ui.enabled", "false"
+  systemProperty "spark.ui.showConsoleProgress", "false"
+
+  classpath = configurations.validatesRunner
+  testClassesDirs = files(project(":beam-sdks-java-core").sourceSets.test.output.classesDirs) + files(project(":beam-sdks-java-io-hadoop-format").sourceSets.test.output.classesDirs) + files(project.sourceSets.test.output.classesDirs)
+  // Only one SparkContext may be running in a JVM (SPARK-2243)
+  forkEvery 1
+  maxParallelForks 4
+  useJUnit {
+    includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesCustomWindowMerging'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesFailureMessage'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesParDoLifecycle'
+    // Unbounded
+    excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedPCollections'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream'
+    // State and Timers
+    excludeCategories 'org.apache.beam.sdk.testing.UsesStatefulParDo'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesMapState'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesSetState'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesTimersInParDo'
+    // Metrics
+    excludeCategories 'org.apache.beam.sdk.testing.UsesAttemptedMetrics'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesCommittedMetrics'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesCounterMetrics'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesDistributionMetrics'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesGaugeMetrics'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesMetricsPusher'
+    // SDF
+    excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesSplittableParDoWithWindowedSideInputs'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo'
+    // Portability
+    excludeCategories 'org.apache.beam.sdk.testing.UsesImpulse'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesCrossLanguageTransforms'
+    // Schema
+    excludeCategories 'org.apache.beam.sdk.testing.UsesSchema'
+  }
+}
+
 task validatesRunner {
   group = "Verification"
   description "Validates Spark runner"
   dependsOn validatesRunnerBatch
   dependsOn validatesRunnerStreaming
+  dependsOn validatesStructuredStreamingRunnerBatch
 }
 
 // Generates :beam-runners-spark:runQuickstartJavaSpark