You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by ch...@apache.org on 2016/12/08 12:54:55 UTC
[2/3] flink git commit: [FLINK-5164] Disable some Hadoop-compat tests on Windows
[FLINK-5164] Disable some Hadoop-compat tests on Windows
This closes #2889.
Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/fe843e13
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/fe843e13
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/fe843e13
Branch: refs/heads/master
Commit: fe843e1377aa08a10394bbfa67dc9d3b2a23b805
Parents: 4414008
Author: zentol <ch...@apache.org>
Authored: Fri Nov 25 14:58:48 2016 +0100
Committer: zentol <ch...@apache.org>
Committed: Thu Dec 8 12:04:48 2016 +0100
----------------------------------------------------------------------
.../test/hadoopcompatibility/mapred/HadoopMapredITCase.java | 9 +++++++++
.../mapreduce/HadoopInputOutputITCase.java | 8 ++++++++
.../flink/test/hadoop/mapred/HadoopIOFormatsITCase.java | 9 +++++++++
.../flink/test/hadoop/mapred/WordCountMapredITCase.java | 9 +++++++++
.../test/hadoop/mapreduce/WordCountMapreduceITCase.java | 9 +++++++++
.../api/scala/hadoop/mapred/WordCountMapredITCase.scala | 8 ++++++++
.../scala/hadoop/mapreduce/WordCountMapreduceITCase.scala | 8 ++++++++
7 files changed, 60 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
index ccc0d82..0b5a366 100644
--- a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
+++ b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapredITCase.java
@@ -21,12 +21,21 @@ package org.apache.flink.test.hadoopcompatibility.mapred;
import org.apache.flink.test.hadoopcompatibility.mapred.example.HadoopMapredCompatWordCount;
import org.apache.flink.test.testdata.WordCountData;
import org.apache.flink.test.util.JavaProgramTestBase;
+import org.apache.flink.util.OperatingSystem;
+import org.junit.Assume;
+import org.junit.Before;
public class HadoopMapredITCase extends JavaProgramTestBase {
protected String textPath;
protected String resultPath;
+ @Before
+ public void checkOperatingSystem() {
+ // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+ Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+ }
+
@Override
protected void preSubmit() throws Exception {
textPath = createTempFile("text.txt", WordCountData.TEXT);
http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
index 698e356..48aa258 100644
--- a/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
+++ b/flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapreduce/HadoopInputOutputITCase.java
@@ -21,12 +21,20 @@ package org.apache.flink.test.hadoopcompatibility.mapreduce;
import org.apache.flink.test.hadoopcompatibility.mapreduce.example.WordCount;
import org.apache.flink.test.testdata.WordCountData;
import org.apache.flink.test.util.JavaProgramTestBase;
+import org.apache.flink.util.OperatingSystem;
+import org.junit.Assume;
+import org.junit.Before;
public class HadoopInputOutputITCase extends JavaProgramTestBase {
protected String textPath;
protected String resultPath;
+ @Before
+ public void checkOperatingSystem() {
+ // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+ Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+ }
@Override
protected void preSubmit() throws Exception {
http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
index 0cb1ac5..468b780 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/HadoopIOFormatsITCase.java
@@ -26,6 +26,7 @@ import org.apache.flink.configuration.Configuration;
import org.apache.flink.api.java.hadoop.mapred.HadoopInputFormat;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.test.util.TestBaseUtils;
+import org.apache.flink.util.OperatingSystem;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
@@ -35,6 +36,8 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.junit.Assume;
+import org.junit.Before;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@@ -61,6 +64,12 @@ public class HadoopIOFormatsITCase extends JavaProgramTestBase {
super(config);
}
+ @Before
+ public void checkOperatingSystem() {
+ // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+ Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+ }
+
@Override
protected void preSubmit() throws Exception {
resultPath = new String[] {getTempDirPath("result0"), getTempDirPath("result1") };
http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
index 80f311a..9528d94 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapred/WordCountMapredITCase.java
@@ -28,18 +28,27 @@ import static org.apache.flink.hadoopcompatibility.HadoopInputs.readHadoopFile;
import org.apache.flink.test.testdata.WordCountData;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.util.Collector;
+import org.apache.flink.util.OperatingSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
+import org.junit.Assume;
+import org.junit.Before;
public class WordCountMapredITCase extends JavaProgramTestBase {
protected String textPath;
protected String resultPath;
+ @Before
+ public void checkOperatingSystem() {
+ // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+ Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+ }
+
@Override
protected void preSubmit() throws Exception {
textPath = createTempFile("text.txt", WordCountData.TEXT);
http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
index 3293770..64062d2 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/hadoop/mapreduce/WordCountMapreduceITCase.java
@@ -28,18 +28,27 @@ import static org.apache.flink.hadoopcompatibility.HadoopInputs.readHadoopFile;
import org.apache.flink.test.testdata.WordCountData;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.apache.flink.util.Collector;
+import org.apache.flink.util.OperatingSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Assume;
+import org.junit.Before;
public class WordCountMapreduceITCase extends JavaProgramTestBase {
protected String textPath;
protected String resultPath;
+ @Before
+ public void checkOperatingSystem() {
+ // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+ Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows());
+ }
+
@Override
protected void preSubmit() throws Exception {
textPath = createTempFile("text.txt", WordCountData.TEXT);
http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
index 6b414d6..9d04ca59 100644
--- a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
+++ b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapred/WordCountMapredITCase.scala
@@ -21,14 +21,22 @@ import org.apache.flink.api.scala._
import org.apache.flink.hadoopcompatibility.scala.HadoopInputs
import org.apache.flink.test.testdata.WordCountData
import org.apache.flink.test.util.{JavaProgramTestBase, TestBaseUtils}
+import org.apache.flink.util.OperatingSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapred.{FileOutputFormat, JobConf, TextInputFormat, TextOutputFormat}
+import org.junit.{Assume, Before}
class WordCountMapredITCase extends JavaProgramTestBase {
protected var textPath: String = null
protected var resultPath: String = null
+ @Before
+ def checkOperatingSystem() {
+ // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+ Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows)
+ }
+
protected override def preSubmit() {
textPath = createTempFile("text.txt", WordCountData.TEXT)
resultPath = getTempDirPath("result")
http://git-wip-us.apache.org/repos/asf/flink/blob/fe843e13/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
index e393d23..3b23a13 100644
--- a/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
+++ b/flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
@@ -22,16 +22,24 @@ import org.apache.flink.api.scala._
import org.apache.flink.hadoopcompatibility.scala.HadoopInputs
import org.apache.flink.test.testdata.WordCountData
import org.apache.flink.test.util.{TestBaseUtils, JavaProgramTestBase}
+import org.apache.flink.util.OperatingSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.{Text, LongWritable}
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat, TextOutputFormat}
+import org.junit.{Assume, Before}
class WordCountMapreduceITCase extends JavaProgramTestBase {
protected var textPath: String = null
protected var resultPath: String = null
+ @Before
+ def checkOperatingSystem() {
+ // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
+ Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows)
+ }
+
protected override def preSubmit() {
textPath = createTempFile("text.txt", WordCountData.TEXT)
resultPath = getTempDirPath("result")