You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by dd...@apache.org on 2009/05/28 12:57:31 UTC
svn commit: r779559 - in /hadoop/core/trunk: ./
src/contrib/streaming/src/java/org/apache/hadoop/streaming/
src/contrib/streaming/src/test/org/apache/hadoop/streaming/
Author: ddas
Date: Thu May 28 10:57:31 2009
New Revision: 779559
URL: http://svn.apache.org/viewvc?rev=779559&view=rev
Log:
HADOOP-5623. Fixes a problem to do with status messages getting overwritten in streaming jobs. Contributed by Rick Cox and Jothi Padmanabhan.
Added:
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/StderrApp.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=779559&r1=779558&r2=779559&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu May 28 10:57:31 2009
@@ -704,6 +704,9 @@
HADOOP-4864. Fixes a problem to do with -libjars with multiple jars when
client and cluster reside on different OSs. (Amareshwari Sriramadasu via ddas)
+ HADOOP-5623. Fixes a problem to do with status messages getting overwritten
+ in streaming jobs. (Rick Cox and Jothi Padmanabhan via ddas)
+
Release 0.20.1 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java?rev=779559&r1=779558&r2=779559&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java Thu May 28 10:57:31 2009
@@ -425,7 +425,11 @@
if (now-lastStdoutReport > reporterOutDelay_) {
lastStdoutReport = now;
String hline = "Records R/W=" + numRecRead_ + "/" + numRecWritten_;
- reporter.setStatus(hline);
+ if (!processProvidedStatus_) {
+ reporter.setStatus(hline);
+ } else {
+ reporter.progress();
+ }
logprintln(hline);
logflush();
}
@@ -476,6 +480,7 @@
if (matchesCounter(lineStr)) {
incrCounter(lineStr);
} else if (matchesStatus(lineStr)) {
+ processProvidedStatus_ = true;
setStatus(lineStr);
} else {
LOG.warn("Cannot parse reporter line: " + lineStr);
@@ -682,4 +687,5 @@
String LOGNAME;
PrintStream log_;
+ volatile boolean processProvidedStatus_ = false;
}
Modified: hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/StderrApp.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/StderrApp.java?rev=779559&r1=779558&r2=779559&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/StderrApp.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/StderrApp.java Thu May 28 10:57:31 2009
@@ -32,8 +32,16 @@
* postWriteLines to stderr.
*/
public static void go(int preWriteLines, int sleep, int postWriteLines) throws IOException {
+ go(preWriteLines, sleep, postWriteLines, false);
+ }
+
+ public static void go(int preWriteLines, int sleep, int postWriteLines, boolean status) throws IOException {
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
String line;
+
+ if (status) {
+ System.err.println("reporter:status:starting echo");
+ }
while (preWriteLines > 0) {
--preWriteLines;
@@ -57,13 +65,14 @@
public static void main(String[] args) throws IOException {
if (args.length < 3) {
- System.err.println("Usage: StderrApp PREWRITE SLEEP POSTWRITE");
+ System.err.println("Usage: StderrApp PREWRITE SLEEP POSTWRITE [STATUS]");
return;
}
int preWriteLines = Integer.parseInt(args[0]);
int sleep = Integer.parseInt(args[1]);
int postWriteLines = Integer.parseInt(args[2]);
+ boolean status = args.length > 3 ? Boolean.parseBoolean(args[3]) : false;
- go(preWriteLines, sleep, postWriteLines);
+ go(preWriteLines, sleep, postWriteLines, status);
}
}
Added: hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java?rev=779559&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java (added)
+++ hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java Thu May 28 10:57:31 2009
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.streaming;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.File;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.mapred.TaskReport;
+
+/**
+ * Tests for the ability of a streaming task to set the status
+ * by writing "reporter:status:" lines to stderr. Uses MiniMR
+ * since the local jobtracker doesn't track status.
+ */
+public class TestStreamingStatus extends TestCase {
+ private static String TEST_ROOT_DIR =
+ new File(System.getProperty("test.build.data","/tmp"))
+ .toURI().toString().replace(' ', '+');
+ protected String INPUT_FILE = TEST_ROOT_DIR + "/input.txt";
+ protected String OUTPUT_DIR = TEST_ROOT_DIR + "/out";
+ protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
+ protected String map = StreamUtil.makeJavaCommand(StderrApp.class, new String[]{"3", "0", "0", "true"});
+
+ protected String[] genArgs(int jobtrackerPort) {
+ return new String[] {
+ "-input", INPUT_FILE,
+ "-output", OUTPUT_DIR,
+ "-mapper", map,
+ "-jobconf", "mapred.map.tasks=1",
+ "-jobconf", "mapred.reduce.tasks=0",
+ "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
+ "-jobconf", "mapred.job.tracker=localhost:"+jobtrackerPort,
+ "-jobconf", "fs.default.name=file:///"
+ };
+ }
+
+ public void makeInput(FileSystem fs) throws IOException {
+ Path inFile = new Path(INPUT_FILE);
+ DataOutputStream file = fs.create(inFile);
+ file.writeBytes(input);
+ file.close();
+ }
+
+ public void clean(FileSystem fs) {
+ try {
+ Path outDir = new Path(OUTPUT_DIR);
+ fs.delete(outDir, true);
+ } catch (Exception e) {}
+ try {
+ Path inFile = new Path(INPUT_FILE);
+ fs.delete(inFile, false);
+ } catch (Exception e) {}
+ }
+
+ public void testStreamingStatus() throws Exception {
+ MiniMRCluster mr = null;
+ FileSystem fs = null;
+ try {
+ mr = new MiniMRCluster(1, "file:///", 3);
+
+ Path inFile = new Path(INPUT_FILE);
+ fs = inFile.getFileSystem(mr.createJobConf());
+ clean(fs);
+ makeInput(fs);
+
+ StreamJob job = new StreamJob();
+ int failed = job.run(genArgs(mr.getJobTrackerPort()));
+ assertEquals(0, failed);
+
+ TaskReport[] reports = job.jc_.getMapTaskReports(job.jobId_);
+ assertEquals(1, reports.length);
+ assertEquals("starting echo > sort", reports[0].getState());
+ } finally {
+ if (fs != null) { clean(fs); }
+ if (mr != null) { mr.shutdown(); }
+ }
+ }
+}