Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2014/11/05 05:20:11 UTC

git commit: MAPREDUCE-6048. Fixed TestJavaSerialization failure. Contributed by Varun Vasudev

Repository: hadoop
Updated Branches:
  refs/heads/trunk 0762b4a3f -> 73068f677


MAPREDUCE-6048. Fixed TestJavaSerialization failure. Contributed by Varun Vasudev


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/73068f67
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/73068f67
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/73068f67

Branch: refs/heads/trunk
Commit: 73068f677bc45029743ba2e0b3c0256a8069c13e
Parents: 0762b4a
Author: Jian He <ji...@apache.org>
Authored: Tue Nov 4 20:19:54 2014 -0800
Committer: Jian He <ji...@apache.org>
Committed: Tue Nov 4 20:19:54 2014 -0800

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |  3 ++
 .../hadoop/mapred/TestJavaSerialization.java    | 34 +++++++++++++-------
 2 files changed, 26 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/73068f67/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index dce4778..bbe96c2 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -456,6 +456,9 @@ Release 2.6.0 - UNRELEASED
     MAPREDUCE-6022. map_input_file is missing from streaming job environment.
     (jlowe via kihwal)
 
+    MAPREDUCE-6048. Fixed TestJavaSerialization failure. (Varun Vasudev via
+    jianhe)
+
 Release 2.5.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/73068f67/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
index 3ab6f56..4dea0d7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
@@ -17,11 +17,9 @@
  */
 package org.apache.hadoop.mapred;
 
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
@@ -30,6 +28,7 @@ import java.util.StringTokenizer;
 
 import junit.framework.TestCase;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -56,7 +55,10 @@ public class TestJavaSerialization extends TestCase {
         throws IOException {
       StringTokenizer st = new StringTokenizer(value.toString());
       while (st.hasMoreTokens()) {
-        output.collect(st.nextToken(), 1L);
+        String token = st.nextToken();
+        assertTrue("Invalid token; expected 'a' or 'b', got " + token,
+          token.equals("a") || token.equals("b"));
+        output.collect(token, 1L);
       }
     }
 
@@ -116,18 +118,28 @@ public class TestJavaSerialization extends TestCase {
 
     FileOutputFormat.setOutputPath(conf, OUTPUT_DIR);
 
+    String inputFileContents =
+        FileUtils.readFileToString(new File(INPUT_FILE.toUri().getPath()));
+    assertTrue("Input file contents not as expected; contents are '"
+        + inputFileContents + "', expected \"b a\n\" ",
+      inputFileContents.equals("b a\n"));
+
     JobClient.runJob(conf);
 
-    Path[] outputFiles = FileUtil.stat2Paths(
-        fs.listStatus(OUTPUT_DIR, 
-                      new Utils.OutputFileUtils.OutputFilesFilter()));
+    Path[] outputFiles =
+        FileUtil.stat2Paths(fs.listStatus(OUTPUT_DIR,
+          new Utils.OutputFileUtils.OutputFilesFilter()));
     assertEquals(1, outputFiles.length);
     InputStream is = fs.open(outputFiles[0]);
-    BufferedReader reader = new BufferedReader(new InputStreamReader(is));
-    assertEquals("a\t1", reader.readLine());
-    assertEquals("b\t1", reader.readLine());
-    assertNull(reader.readLine());
-    reader.close();
+    String reduceOutput = org.apache.commons.io.IOUtils.toString(is);
+    String[] lines = reduceOutput.split(System.getProperty("line.separator"));
+    assertEquals("Unexpected output; received output '" + reduceOutput + "'",
+      "a\t1", lines[0]);
+    assertEquals("Unexpected output; received output '" + reduceOutput + "'",
+      "b\t1", lines[1]);
+    assertEquals("Reduce output has extra lines; output is '" + reduceOutput
+        + "'", 2, lines.length);
+    is.close();
   }
 
   /**
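----------------------------------------------------------------------

For reference, a minimal standalone sketch (not part of this commit) of the output-checking pattern the updated test adopts: the whole reduce output is read into a single String with commons-io's IOUtils.toString and split on the platform line separator, instead of being read line by line through a BufferedReader. The class name ReduceOutputCheck and the command-line path argument below are illustrative assumptions, not code from the commit.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.io.IOUtils;

// Illustrative sketch: check the reduce output as a whole, the way the
// updated TestJavaSerialization does, rather than line by line.
public class ReduceOutputCheck {
  public static void main(String[] args) throws IOException {
    // Hypothetical path to a reducer output file, e.g. part-00000.
    try (InputStream is = Files.newInputStream(Paths.get(args[0]))) {
      String reduceOutput = IOUtils.toString(is);
      String[] lines = reduceOutput.split(System.getProperty("line.separator"));
      if (lines.length != 2 || !"a\t1".equals(lines[0]) || !"b\t1".equals(lines[1])) {
        throw new AssertionError("Unexpected reduce output: '" + reduceOutput + "'");
      }
      System.out.println("Reduce output matches the expected two lines.");
    }
  }
}

Reading the full output and splitting it makes the assertion messages carry the entire received output, which is why the test failure is easier to diagnose than with per-line reads.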