Posted to commits@beam.apache.org by dh...@apache.org on 2016/07/26 17:52:37 UTC

[2/3] incubator-beam git commit: Remove spark test dependency on DirectRunner

Remove spark test dependency on DirectRunner

Rather than asserting that the SparkRunner output matches the DirectRunner output, assert that the SparkRunner output is itself correct: it must match the known contents of the test input file.
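
Pieced together from the hunks below, the reworked test body looks roughly like this. This is a sketch, not the verbatim file; runPipeline(...) and readFromOutputFiles(...) are the helpers already defined in TransformTranslatorTest, and the resource path is the one used in the diff.

    import static org.hamcrest.Matchers.containsInAnyOrder;
    import static org.junit.Assert.assertThat;

    import com.google.common.io.Files;

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Paths;
    import java.util.List;

    @Test
    public void testTextIOReadAndWriteTransforms() throws IOException {
      // Run the pipeline only on the SparkRunner; no DirectRunner run is needed.
      String sparkOut = runPipeline(SparkRunner.class);

      // The expected output is simply the known contents of the test input file.
      List<String> expected =
          Files.readLines(
              Paths.get("src/test/resources/test_text.txt").toFile(), StandardCharsets.UTF_8);

      // Collect the lines the SparkRunner actually wrote.
      File sparkOutFile = new File(sparkOut);
      List<String> actual =
          readFromOutputFiles(sparkOutFile.getParentFile(), sparkOutFile.getName());

      // PCollections carry no ordering guarantee, so compare contents in any order.
      assertThat(actual, containsInAnyOrder(expected.toArray()));
    }

With the DirectRunner no longer used as the reference, the beam-runners-direct-java test dependency in runners/spark/pom.xml can be dropped, which is the other half of this commit.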


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/51011581
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/51011581
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/51011581

Branch: refs/heads/master
Commit: 51011581f70fa1f65de62adaba94c193af03fb61
Parents: 8597a3c
Author: Dan Halperin <dh...@google.com>
Authored: Mon Jul 25 18:37:57 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Tue Jul 26 10:52:28 2016 -0700

----------------------------------------------------------------------
 runners/spark/pom.xml                                  |  5 -----
 .../spark/translation/TransformTranslatorTest.java     | 13 +++++++------
 2 files changed, 7 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/51011581/runners/spark/pom.xml
----------------------------------------------------------------------
diff --git a/runners/spark/pom.xml b/runners/spark/pom.xml
index 665f15d..60d9ef3 100644
--- a/runners/spark/pom.xml
+++ b/runners/spark/pom.xml
@@ -246,11 +246,6 @@
       <artifactId>hamcrest-all</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.beam</groupId>
-      <artifactId>beam-runners-direct-java</artifactId>
-      <version>0.2.0-incubating-SNAPSHOT</version>
-    </dependency>
 
     <!-- Depend on test jar to scan for RunnableOnService tests -->
     <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/51011581/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/TransformTranslatorTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/TransformTranslatorTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/TransformTranslatorTest.java
index a17e8f3..e1789f1 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/TransformTranslatorTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/TransformTranslatorTest.java
@@ -21,7 +21,6 @@ package org.apache.beam.runners.spark.translation;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.junit.Assert.assertThat;
 
-import org.apache.beam.runners.direct.DirectRunner;
 import org.apache.beam.runners.spark.SparkRunner;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.TextIO;
@@ -31,6 +30,7 @@ import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.values.PCollection;
 
 import com.google.common.base.Charsets;
+import com.google.common.io.Files;
 
 import org.apache.commons.io.FileUtils;
 import org.junit.Rule;
@@ -41,6 +41,8 @@ import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Pattern;
@@ -62,19 +64,18 @@ public class TransformTranslatorTest {
    */
   @Test
   public void testTextIOReadAndWriteTransforms() throws IOException {
-    String directOut = runPipeline(DirectRunner.class);
     String sparkOut = runPipeline(SparkRunner.class);
 
-    File directOutFile = new File(directOut);
-    List<String> directOutput =
-            readFromOutputFiles(directOutFile.getParentFile(), directOutFile.getName());
+    List<String> lines =
+        Files.readLines(
+            Paths.get("src/test/resources/test_text.txt").toFile(), StandardCharsets.UTF_8);
 
     File sparkOutFile = new File(sparkOut);
     List<String> sparkOutput =
             readFromOutputFiles(sparkOutFile.getParentFile(), sparkOutFile.getName());
 
     // sort output to get a stable result (PCollections are not ordered)
-    assertThat(sparkOutput, containsInAnyOrder(directOutput.toArray()));
+    assertThat(sparkOutput, containsInAnyOrder(lines.toArray()));
   }
 
   private String runPipeline(Class<? extends PipelineRunner<?>> runner) throws IOException {