Posted to mapreduce-commits@hadoop.apache.org by st...@apache.org on 2009/11/28 21:26:22 UTC
svn commit: r885145 [10/34] - in /hadoop/mapreduce/branches/MAPREDUCE-233:
./ .eclipse.templates/ .eclipse.templates/.launches/ conf/ ivy/ lib/
src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/
src/benchmarks/gridmix2/ src/benchmarks/gridmix2/sr...
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java Sat Nov 28 20:26:01 2009
@@ -28,6 +28,7 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.SkipBadRecords;
+import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.streaming.io.InputWriter;
import org.apache.hadoop.streaming.io.OutputReader;
import org.apache.hadoop.util.StringUtils;
@@ -69,7 +70,7 @@
//processed records could differ from (be equal to or less than) the
//number of records input.
SkipBadRecords.setAutoIncrReducerProcCount(job, false);
- skipping = job.getBoolean("mapred.skip.on", false);
+ skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
try {
reduceOutFieldSeparator = job_.get("stream.reduce.output.field.separator", "\t").getBytes("UTF-8");
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Sat Nov 28 20:26:01 2009
@@ -45,9 +45,12 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.filecache.DistributedCache;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.FileAlreadyExistsException;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.InvalidJobConfException;
@@ -123,6 +126,8 @@
return submitAndMonitorJob();
}catch (IllegalArgumentException ex) {
//ignore, since log will already be printed
+ // print the log in debug mode.
+ LOG.debug("Error in streaming job", ex);
return 1;
}
}
@@ -348,13 +353,13 @@
return OptionBuilder.withDescription(desc).create(name);
}
- private static void validate(final List<String> values)
+ private void validate(final List<String> values)
throws IllegalArgumentException {
for (String file : values) {
File f = new File(file);
if (!f.canRead()) {
- throw new IllegalArgumentException("File : " + f.getAbsolutePath()
- + " is not readable.");
+ fail("File: " + f.getAbsolutePath()
+ + " does not exist, or is not readable.");
}
}
}
@@ -503,7 +508,7 @@
System.out.println(" The location of this working directory is unspecified.");
System.out.println();
System.out.println("To set the number of reduce tasks (num. of output files):");
- System.out.println(" -D mapred.reduce.tasks=10");
+ System.out.println(" -D " + JobContext.NUM_REDUCES + "=10");
System.out.println("To skip the sort/combine/shuffle/sort/reduce step:");
System.out.println(" Use -numReduceTasks 0");
System.out
@@ -514,18 +519,18 @@
System.out.println(" This equivalent -reducer NONE");
System.out.println();
System.out.println("To speed up the last maps:");
- System.out.println(" -D mapred.map.tasks.speculative.execution=true");
+ System.out.println(" -D " + JobContext.MAP_SPECULATIVE + "=true");
System.out.println("To speed up the last reduces:");
- System.out.println(" -D mapred.reduce.tasks.speculative.execution=true");
+ System.out.println(" -D " + JobContext.REDUCE_SPECULATIVE + "=true");
System.out.println("To name the job (appears in the JobTracker Web UI):");
- System.out.println(" -D mapred.job.name='My Job' ");
+ System.out.println(" -D " + JobContext.JOB_NAME + "='My Job'");
System.out.println("To change the local temp directory:");
System.out.println(" -D dfs.data.dir=/tmp/dfs");
System.out.println(" -D stream.tmpdir=/tmp/streaming");
System.out.println("Additional local temp directories with -cluster local:");
- System.out.println(" -D mapred.local.dir=/tmp/local");
- System.out.println(" -D mapred.system.dir=/tmp/system");
- System.out.println(" -D mapred.temp.dir=/tmp/temp");
+ System.out.println(" -D " + MRConfig.LOCAL_DIR + "=/tmp/local");
+ System.out.println(" -D " + JTConfig.JT_SYSTEM_DIR + "=/tmp/system");
+ System.out.println(" -D " + MRConfig.TEMP_DIR + "=/tmp/temp");
System.out.println("To treat tasks with non-zero exit status as SUCCEDED:");
System.out.println(" -D stream.non.zero.exit.is.failure=false");
System.out.println("Use a custom hadoopStreaming build along a standard hadoop install:");
@@ -610,7 +615,7 @@
if (packageFiles_.size() + unjarFiles.size() == 0) {
return null;
}
- String tmp = jobConf_.get("stream.tmpdir"); //, "/tmp/${user.name}/"
+ String tmp = jobConf_.get("stream.tmpdir"); //, "/tmp/${mapreduce.job.user.name}/"
File tmpDir = (tmp == null) ? null : new File(tmp);
// tmpDir=null means OS default tmp dir
File jobJar = File.createTempFile("streamjob", ".jar", tmpDir);
@@ -652,7 +657,7 @@
// The correct FS must be set before this is called!
// (to resolve local vs. dfs drive letter differences)
- // (mapred.working.dir will be lazily initialized ONCE and depends on FS)
+ // (mapreduce.job.working.dir will be lazily initialized ONCE and depends on FS)
for (int i = 0; i < inputSpecs_.size(); i++) {
FileInputFormat.addInputPaths(jobConf_,
(String) inputSpecs_.get(i));
@@ -894,7 +899,7 @@
}
protected String getJobTrackerHostPort() {
- return jobConf_.get("mapred.job.tracker");
+ return jobConf_.get(JTConfig.JT_IPC_ADDRESS);
}
protected void jobInfo() {
@@ -903,7 +908,7 @@
} else {
String hp = getJobTrackerHostPort();
LOG.info("To kill this job, run:");
- LOG.info(getHadoopClientHome() + "/bin/hadoop job -Dmapred.job.tracker=" + hp + " -kill "
+ LOG.info(getHadoopClientHome() + "/bin/hadoop job -D" + JTConfig.JT_IPC_ADDRESS + "=" + hp + " -kill "
+ jobId_);
//LOG.info("Job file: " + running_.getJobFile());
LOG.info("Tracking URL: " + StreamUtil.qualifyHost(running_.getTrackingURL()));
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java Sat Nov 28 20:26:01 2009
@@ -36,6 +36,8 @@
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
/** Utilities not available elsewhere in Hadoop.
*
@@ -420,13 +422,13 @@
// JobConf helpers
public static FileSplit getCurrentSplit(JobConf job) {
- String path = job.get("map.input.file");
+ String path = job.get(JobContext.MAP_INPUT_FILE);
if (path == null) {
return null;
}
Path p = new Path(path);
- long start = Long.parseLong(job.get("map.input.start"));
- long length = Long.parseLong(job.get("map.input.length"));
+ long start = Long.parseLong(job.get(JobContext.MAP_INPUT_START));
+ long length = Long.parseLong(job.get(JobContext.MAP_INPUT_LENGTH));
return new FileSplit(p, start, length, job);
}
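A short usage sketch for the reworked helper; the JobConf below is
hypothetical, since in a real streaming task the three map-input properties are
populated by the framework before user code runs:

    import org.apache.hadoop.mapred.FileSplit;
    import org.apache.hadoop.mapred.JobConf;

    // Hypothetical caller: split is null unless the framework has set
    // JobContext.MAP_INPUT_FILE / MAP_INPUT_START / MAP_INPUT_LENGTH.
    JobConf job = new JobConf();
    FileSplit split = StreamUtil.getCurrentSplit(job);
    if (split != null) {
      System.out.println(split.getPath() + " @ " + split.getStart()
          + " + " + split.getLength());
    }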
@@ -439,16 +441,16 @@
}
public static boolean isLocalJobTracker(JobConf job) {
- return job.get("mapred.job.tracker", "local").equals("local");
+ return job.get(JTConfig.JT_IPC_ADDRESS, "local").equals("local");
}
public static TaskId getTaskInfo(JobConf job) {
TaskId res = new TaskId();
- String id = job.get("mapred.task.id");
+ String id = job.get(JobContext.TASK_ATTEMPT_ID);
if (isLocalJobTracker(job)) {
// it uses different naming
- res.mapTask = job.getBoolean("mapred.task.is.map", true);
+ res.mapTask = job.getBoolean(JobContext.TASK_ISMAP, true);
res.jobid = "0";
res.taskid = 0;
res.execid = 0;
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java Sat Nov 28 20:26:01 2009
@@ -149,7 +149,7 @@
} else if (code == Type.MARKER.code) {
return null;
} else if (50 <= code && code <= 200) { // application-specific typecodes
- return readRawBytes();
+ return readRawBytes(code);
} else {
throw new RuntimeException("unknown type");
}
@@ -202,14 +202,15 @@
}
/**
- * Reads the raw bytes following a <code>Type.BYTES</code> code.
+ * Reads the raw bytes following a custom code.
+ * @param code the custom type code
* @return the obtained bytes sequence
* @throws IOException
*/
- public byte[] readRawBytes() throws IOException {
+ public byte[] readRawBytes(int code) throws IOException {
int length = in.readInt();
byte[] bytes = new byte[5 + length];
- bytes[0] = (byte) Type.BYTES.code;
+ bytes[0] = (byte) code;
bytes[1] = (byte) (0xff & (length >> 24));
bytes[2] = (byte) (0xff & (length >> 16));
bytes[3] = (byte) (0xff & (length >> 8));
@@ -217,6 +218,15 @@
in.readFully(bytes, 5, length);
return bytes;
}
+
+ /**
+ * Reads the raw bytes following a <code>Type.BYTES</code> code.
+ * @return the obtained bytes sequence
+ * @throws IOException
+ */
+ public byte[] readRawBytes() throws IOException {
+ return readRawBytes(Type.BYTES.code);
+ }
/**
* Reads the byte following a <code>Type.BYTE</code> code.
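The raw frame these two methods rebuild is the typecode byte followed by a
four-byte big-endian length and the payload. A minimal sketch of reading one
application-specific record back (mirroring the new testCustomTypesIO below):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;
    import org.apache.hadoop.typedbytes.TypedBytesInput;

    public class RawFrameSketch {
      public static void main(String[] args) throws IOException {
        // code=100, length=3 (big-endian), payload {1, 2, 3}
        byte[] frame = {100, 0, 0, 0, 3, 1, 2, 3};
        TypedBytesInput in = new TypedBytesInput(
            new DataInputStream(new ByteArrayInputStream(frame)));
        byte[] raw = in.readRaw();  // full 8-byte frame, typecode prefix included
        assert raw.length == frame.length;
      }
    }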
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java Sat Nov 28 20:26:01 2009
@@ -32,6 +32,7 @@
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.hdfs.MiniDFSCluster;
/**
@@ -69,7 +70,7 @@
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().getAuthority();
mr = new MiniMRCluster(1, namenode, 3);
- strJobTracker = "mapred.job.tracker=" + "localhost:" + mr.getJobTrackerPort();
+ strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
strNamenode = "fs.default.name=" + namenode;
} catch (Exception e) {
e.printStackTrace();
@@ -116,7 +117,7 @@
"-output", OUTPUT_DIR,
"-mapper", "xargs cat",
"-reducer", "cat",
- "-jobconf", "mapred.reduce.tasks=1",
+ "-jobconf", "mapreduce.job.reduces=1",
"-cacheArchive", cacheArchiveString1,
"-cacheArchive", cacheArchiveString2,
"-jobconf", strNamenode,
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java Sat Nov 28 20:26:01 2009
@@ -34,7 +34,8 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
/**
* This test case tests the symlink creation
* utility provided by distributed caching
@@ -73,7 +74,7 @@
mr = new MiniMRCluster(1, namenode, 3);
// During tests, the default Configuration will use a local mapred
// So don't specify -config or -cluster
- String strJobtracker = "mapred.job.tracker=" + "localhost:" + mr.getJobTrackerPort();
+ String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
String strNamenode = "fs.default.name=" + namenode;
String argv[] = new String[] {
"-input", INPUT_FILE,
@@ -122,7 +123,8 @@
String line2 = null;
Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
new Path(OUTPUT_DIR),
- new OutputLogFilter()));
+ new Utils.OutputFileUtils
+ .OutputFilesFilter()));
for (int i = 0; i < fileList.length; i++){
System.out.println(fileList[i].toString());
BufferedReader bread =
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java Sat Nov 28 20:26:01 2009
@@ -24,6 +24,7 @@
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
import junit.framework.TestCase;
@@ -54,7 +55,7 @@
"-output", OUTPUT_DIR.getAbsolutePath(),
"-mapper", map,
"-reducer", reduce,
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
"-jobconf", "stream.map.output=rawbytes",
"-jobconf", "stream.reduce.input=rawbytes",
@@ -65,7 +66,7 @@
public void testCommandLine() throws Exception {
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -86,10 +87,8 @@
System.err.println(" out1=" + output);
assertEquals(outputExpect, output);
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
}
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java Sat Nov 28 20:26:01 2009
@@ -20,10 +20,9 @@
import junit.framework.TestCase;
import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.mapreduce.JobContext;
/**
* This class tests hadoopStreaming in MapReduce local mode.
@@ -65,7 +64,7 @@
"-reducer", "aggregate",
//"-verbose",
//"-jobconf", "stream.debug=set"
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
@@ -74,7 +73,7 @@
{
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -94,10 +93,12 @@
} catch(Exception e) {
failTrace(e);
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
- INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ failTrace(e);
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java Sat Nov 28 20:26:01 2009
@@ -23,6 +23,7 @@
import java.util.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
@@ -70,10 +71,10 @@
//"-verbose",
"-jobconf", "stream.map.output.field.separator=.",
"-jobconf", "stream.num.map.output.key.fields=2",
- "-jobconf", "map.output.key.field.separator=.",
+ "-jobconf", "mapreduce.mapreduce.mapreduce.map.output.key.field.separator=.",
"-jobconf", "num.key.fields.for.partition=1",
- "-jobconf", "mapred.reduce.tasks=2",
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.job.reduces=2",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
@@ -82,7 +83,7 @@
{
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -103,10 +104,12 @@
} catch(Exception e) {
failTrace(e);
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
- INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ failTrace(e);
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java Sat Nov 28 20:26:01 2009
@@ -23,6 +23,7 @@
import java.util.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
/**
@@ -64,7 +65,7 @@
"-mapper", map,
"-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
"-numReduceTasks", "0",
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
@@ -75,7 +76,7 @@
File outFile = null;
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -94,11 +95,12 @@
} catch(Exception e) {
failTrace(e);
} finally {
- outFile.delete();
- File outFileCRC = new File(OUTPUT_DIR, "."+outFileName+".crc").getAbsoluteFile();
- INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ failTrace(e);
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java Sat Nov 28 20:26:01 2009
@@ -22,6 +22,8 @@
import java.io.FileOutputStream;
import java.io.IOException;
+import org.apache.hadoop.fs.FileUtil;
+
/**
* This class tests StreamXmlRecordReader
* The test creates an XML file, uses StreamXmlRecordReader and compares
@@ -61,7 +63,7 @@
public void testCommandLine() {
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
createInput();
@@ -74,10 +76,12 @@
} catch (Exception e) {
e.printStackTrace();
} finally {
- INPUT_FILE.delete();
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java Sat Nov 28 20:26:01 2009
@@ -21,6 +21,8 @@
import junit.framework.TestCase;
import java.io.*;
+import org.apache.hadoop.fs.FileUtil;
+
/**
* This class tests hadoopStreaming in MapReduce local mode.
*/
@@ -64,7 +66,7 @@
"-reducer", reduce,
//"-verbose",
//"-jobconf", "stream.debug=set"
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
@@ -73,7 +75,7 @@
{
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -91,10 +93,12 @@
System.err.println(" out1=" + output);
assertEquals(outputExpect, output);
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
- INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java Sat Nov 28 20:26:01 2009
@@ -61,7 +61,7 @@
new String[] { "-input", inputPath.makeQualified(inFs).toString(),
"-output", outputPath.makeQualified(outFs).toString(), "-mapper",
map, "-reducer", reduce, "-jobconf",
- "keep.failed.task.files=true", "-jobconf",
+ "mapreduce.task.files.preserve.failedtasks=true", "-jobconf",
"stream.tmpdir=" + System.getProperty("test.build.data", "/tmp") };
StreamJob streamJob = new StreamJob(args, true);
streamJob.setConf(myConf);
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java Sat Nov 28 20:26:01 2009
@@ -38,9 +38,10 @@
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputLogFilter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SkipBadRecords;
+import org.apache.hadoop.mapred.Utils;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
public class TestStreamingBadRecords extends ClusterMapReduceTestCase
{
@@ -69,7 +70,7 @@
protected void setUp() throws Exception {
Properties props = new Properties();
- props.setProperty("mapred.job.tracker.retire.jobs", "false");
+ props.setProperty(JTConfig.JT_RETIREJOBS, "false");
startCluster(true, props);
}
@@ -125,7 +126,7 @@
badRecs.addAll(REDUCER_BAD_RECORDS);
Path[] outputFiles = FileUtil.stat2Paths(
getFileSystem().listStatus(getOutputDir(),
- new OutputLogFilter()));
+ new Utils.OutputFileUtils.OutputFilesFilter()));
if (outputFiles.length > 0) {
InputStream is = getFileSystem().open(outputFiles[0]);
@@ -169,20 +170,21 @@
"-reducer", badReducer,
"-verbose",
"-inputformat", "org.apache.hadoop.mapred.KeyValueTextInputFormat",
- "-jobconf", "mapred.skip.attempts.to.start.skipping="+attSkip,
- "-jobconf", "mapred.skip.out.dir=none",
- "-jobconf", "mapred.map.max.attempts="+mapperAttempts,
- "-jobconf", "mapred.reduce.max.attempts="+reducerAttempts,
- "-jobconf", "mapred.skip.map.max.skip.records="+Long.MAX_VALUE,
- "-jobconf", "mapred.skip.reduce.max.skip.groups="+Long.MAX_VALUE,
- "-jobconf", "mapred.map.tasks=1",
- "-jobconf", "mapred.reduce.tasks=1",
+ "-jobconf", "mapreduce.task.skip.start.attempts="+attSkip,
+ "-jobconf", "mapreduce.job.skip.outdir=none",
+ "-jobconf", "mapreduce.map.maxattempts="+mapperAttempts,
+ "-jobconf", "mapreduce.reduce.maxattempts="+reducerAttempts,
+ "-jobconf", "mapreduce.map.skip.maxrecords="+Long.MAX_VALUE,
+ "-jobconf", "mapreduce.reduce.skip.maxgroups="+Long.MAX_VALUE,
+ "-jobconf", "mapreduce.job.maps=1",
+ "-jobconf", "mapreduce.job.reduces=1",
"-jobconf", "fs.default.name="+clusterConf.get("fs.default.name"),
- "-jobconf", "mapred.job.tracker="+clusterConf.get("mapred.job.tracker"),
- "-jobconf", "mapred.job.tracker.http.address="
- +clusterConf.get("mapred.job.tracker.http.address"),
+ "-jobconf", "mapreduce.jobtracker.address=" +
+ clusterConf.get(JTConfig.JT_IPC_ADDRESS),
+ "-jobconf", "mapreduce.jobtracker.http.address="
+ +clusterConf.get(JTConfig.JT_HTTP_ADDRESS),
"-jobconf", "stream.debug=set",
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
StreamJob job = new StreamJob(args, false);
@@ -202,22 +204,22 @@
"-reducer", badReducer,
"-verbose",
"-inputformat", "org.apache.hadoop.mapred.KeyValueTextInputFormat",
- "-jobconf", "mapred.skip.attempts.to.start.skipping=1",
+ "-jobconf", "mapreduce.task.skip.start.attempts=1",
//actually fewer attempts are required than specified
//but to cater to the case of slow processed counter update, need to
//have more attempts
- "-jobconf", "mapred.map.max.attempts=20",
- "-jobconf", "mapred.reduce.max.attempts=15",
- "-jobconf", "mapred.skip.map.max.skip.records=1",
- "-jobconf", "mapred.skip.reduce.max.skip.groups=1",
- "-jobconf", "mapred.map.tasks=1",
- "-jobconf", "mapred.reduce.tasks=1",
+ "-jobconf", "mapreduce.map.maxattempts=20",
+ "-jobconf", "mapreduce.reduce.maxattempts=15",
+ "-jobconf", "mapreduce.map.skip.maxrecords=1",
+ "-jobconf", "mapreduce.reduce.skip.maxgroups=1",
+ "-jobconf", "mapreduce.job.maps=1",
+ "-jobconf", "mapreduce.job.reduces=1",
"-jobconf", "fs.default.name="+clusterConf.get("fs.default.name"),
- "-jobconf", "mapred.job.tracker="+clusterConf.get("mapred.job.tracker"),
- "-jobconf", "mapred.job.tracker.http.address="
- +clusterConf.get("mapred.job.tracker.http.address"),
+ "-jobconf", "mapreduce.jobtracker.address="+clusterConf.get(JTConfig.JT_IPC_ADDRESS),
+ "-jobconf", "mapreduce.jobtracker.http.address="
+ +clusterConf.get(JTConfig.JT_HTTP_ADDRESS),
"-jobconf", "stream.debug=set",
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
StreamJob job = new StreamJob(args, false);
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java Sat Nov 28 20:26:01 2009
@@ -21,6 +21,7 @@
import java.io.File;
import java.io.IOException;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.Counters.Group;
@@ -38,7 +39,7 @@
{
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -62,10 +63,12 @@
assertNotNull("Counter", counter);
assertEquals(3, counter.getCounter());
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
- INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java Sat Nov 28 20:26:01 2009
@@ -21,6 +21,8 @@
import junit.framework.TestCase;
import java.io.*;
+import org.apache.hadoop.fs.FileUtil;
+
/**
* This class tests hadoopStreaming in MapReduce local mode by giving
* empty input to mapper and the mapper generates nonempty output. Since map()
@@ -68,7 +70,7 @@
"-reducer", reduce,
//"-verbose",
//"-jobconf", "stream.debug=set"
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
@@ -100,11 +102,13 @@
outFile = new File(OUTPUT_DIR, "part-00000").getAbsoluteFile();
outFile.delete();
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
- INPUT_FILE.delete();
- SCRIPT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ SCRIPT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java Sat Nov 28 20:26:01 2009
@@ -50,10 +50,10 @@
"-output", OUTPUT_DIR.getAbsolutePath(),
"-mapper", (failMap ? failingTask : echoTask),
"-reducer", (failMap ? echoTask : failingTask),
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.non.zero.exit.is.failure=" + exitStatusIsFailure,
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
- "-jobconf", "io.sort.mb=10"
+ "-jobconf", "mapreduce.task.io.sort.mb=10"
};
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java Sat Nov 28 20:26:01 2009
@@ -51,7 +51,7 @@
"-reducer", reduce,
//"-verbose",
//"-jobconf", "stream.debug=set"
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
@@ -75,7 +75,11 @@
} catch(Exception e) {
// Expecting an exception
} finally {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java Sat Nov 28 20:26:01 2009
@@ -21,6 +21,8 @@
import junit.framework.TestCase;
import java.io.*;
+import org.apache.hadoop.fs.FileUtil;
+
/**
* This class tests hadoopStreaming in MapReduce local mode.
* This testcase looks at different cases of tab position in input.
@@ -66,7 +68,7 @@
"-input", INPUT_FILE.getAbsolutePath(),
"-output", OUTPUT_DIR.getAbsolutePath(),
"-mapper", "cat",
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.non.zero.exit.is.failure=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
@@ -78,7 +80,7 @@
File outFile = null;
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -97,12 +99,12 @@
} catch(Exception e) {
failTrace(e);
} finally {
- outFile.delete();
- File outFileCRC = new File(OUTPUT_DIR,
- "." + outFileName + ".crc").getAbsoluteFile();
- INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ failTrace(e);
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java Sat Nov 28 20:26:01 2009
@@ -23,6 +23,7 @@
import java.util.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
/**
@@ -36,7 +37,7 @@
protected File INPUT_FILE = new File("TestStreamingSeparator.input.txt");
protected File OUTPUT_DIR = new File("TestStreamingSeparator.out");
protected String input = "roses1are.red\nviolets1are.blue\nbunnies1are.pink\n";
- // key.value.separator.in.input.line reads 1 as separator
+ // mapreduce.input.keyvaluelinerecordreader.key.value.separator reads 1 as separator
// stream.map.input.field.separator uses 2 as separator
// map behaves like "/usr/bin/tr 2 3"; (translate 2 to 3)
protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{"2", "3"});
@@ -45,7 +46,7 @@
// reduce behaves like "/usr/bin/tr 3 4"; (translate 3 to 4)
protected String reduce = StreamUtil.makeJavaCommand(TrAppReduce.class, new String[]{"3", "4"});
// stream.reduce.output.field.separator recognize 4 as separator
- // mapred.textoutputformat.separator outputs 5 as separator
+ // mapreduce.output.textoutputformat.separator outputs 5 as separator
protected String outputExpect = "bunnies5are.pink\nroses5are.red\nviolets5are.blue\n";
private StreamJob job;
@@ -73,15 +74,15 @@
"-reducer", reduce,
//"-verbose",
//"-jobconf", "stream.debug=set"
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
"-inputformat", "KeyValueTextInputFormat",
- "-jobconf", "key.value.separator.in.input.line=1",
+ "-jobconf", "mapreduce.input.keyvaluelinerecordreader.key.value.separator=1",
"-jobconf", "stream.map.input.field.separator=2",
"-jobconf", "stream.map.output.field.separator=3",
"-jobconf", "stream.reduce.input.field.separator=3",
"-jobconf", "stream.reduce.output.field.separator=4",
- "-jobconf", "mapred.textoutputformat.separator=5",
+ "-jobconf", "mapreduce.output.textoutputformat.separator=5",
};
}
@@ -89,7 +90,7 @@
{
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -109,10 +110,12 @@
} catch(Exception e) {
failTrace(e);
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
- INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ try {
+ INPUT_FILE.delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+ } catch (IOException e) {
+ failTrace(e);
+ }
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java Sat Nov 28 20:26:01 2009
@@ -29,6 +29,8 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.TaskReport;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
/**
* Tests for the ability of a streaming task to set the status
@@ -49,11 +51,11 @@
"-input", INPUT_FILE,
"-output", OUTPUT_DIR,
"-mapper", map,
- "-jobconf", "mapred.map.tasks=1",
- "-jobconf", "mapred.reduce.tasks=0",
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", JobContext.NUM_MAPS + "=1",
+ "-jobconf", JobContext.NUM_REDUCES + "=0",
+ "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
- "-jobconf", "mapred.job.tracker=localhost:"+jobtrackerPort,
+ "-jobconf", JTConfig.JT_IPC_ADDRESS + "=localhost:"+jobtrackerPort,
"-jobconf", "fs.default.name=file:///"
};
}
@@ -80,7 +82,7 @@
MiniMRCluster mr = null;
FileSystem fs = null;
JobConf conf = new JobConf();
- conf.setBoolean("mapred.job.tracker.retire.jobs", false);
+ conf.setBoolean(JTConfig.JT_RETIREJOBS, false);
try {
mr = new MiniMRCluster(1, "file:///", 3, null , null, conf);
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java Sat Nov 28 20:26:01 2009
@@ -47,8 +47,8 @@
Integer.toString(duringLines),
Integer.toString(postLines)}),
"-reducer", StreamJob.REDUCE_NONE,
- "-jobconf", "keep.failed.task.files=true",
- "-jobconf", "mapred.task.timeout=5000",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
+ "-jobconf", "mapreduce.task.timeout=5000",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java Sat Nov 28 20:26:01 2009
@@ -34,7 +34,8 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
/**
* This test case tests the symlink creation
* utility provided by distributed caching
@@ -69,7 +70,7 @@
mr = new MiniMRCluster(1, namenode, 3);
// During tests, the default Configuration will use a local mapred
// So don't specify -config or -cluster
- String strJobtracker = "mapred.job.tracker=" + "localhost:" + mr.getJobTrackerPort();
+ String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
String strNamenode = "fs.default.name=" + namenode;
String argv[] = new String[] {
"-input", INPUT_FILE,
@@ -112,7 +113,8 @@
String line = null;
Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
new Path(OUTPUT_DIR),
- new OutputLogFilter()));
+ new Utils.OutputFileUtils
+ .OutputFilesFilter()));
for (int i = 0; i < fileList.length; i++){
System.out.println(fileList[i].toString());
BufferedReader bread =
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java Sat Nov 28 20:26:01 2009
@@ -24,6 +24,7 @@
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
import junit.framework.TestCase;
@@ -54,7 +55,7 @@
"-output", OUTPUT_DIR.getAbsolutePath(),
"-mapper", map,
"-reducer", reduce,
- "-jobconf", "keep.failed.task.files=true",
+ "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
"-io", "typedbytes"
};
@@ -63,7 +64,7 @@
public void testCommandLine() throws Exception {
try {
try {
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
} catch (Exception e) {
}
@@ -84,10 +85,8 @@
System.err.println(" out1=" + output);
assertEquals(outputExpect, output);
} finally {
- File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
INPUT_FILE.delete();
- outFileCRC.delete();
- OUTPUT_DIR.getAbsoluteFile().delete();
+ FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
}
}
}
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java Sat Nov 28 20:26:01 2009
@@ -27,6 +27,8 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.TestMiniMRWithDFS;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.util.StringUtils;
import junit.framework.TestCase;
@@ -55,9 +57,9 @@
"-mapper", map,
"-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
"-numReduceTasks", "0",
- "-jobconf", "mapred.map.tasks=1",
+ "-jobconf", JobContext.NUM_MAPS + "=1",
"-jobconf", JobConf.MAPRED_MAP_TASK_ULIMIT + "=" + memLimit,
- "-jobconf", "mapred.job.tracker=" + "localhost:" +
+ "-jobconf", JTConfig.JT_IPC_ADDRESS + "=localhost:" +
mr.getJobTrackerPort(),
"-jobconf", "fs.default.name=" + "hdfs://localhost:"
+ dfs.getNameNodePort(),
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java Sat Nov 28 20:26:01 2009
@@ -41,18 +41,18 @@
// test that some JobConf properties are exposed as expected
// Note the dots translated to underscore:
// property names have been escaped in PipeMapRed.safeEnvVarName()
- expectDefined("mapred_local_dir");
+ expectDefined("mapreduce_cluster_local_dir");
expect("mapred_output_format_class", "org.apache.hadoop.mapred.TextOutputFormat");
- expect("mapred_output_key_class", "org.apache.hadoop.io.Text");
- expect("mapred_output_value_class", "org.apache.hadoop.io.Text");
+ expect("mapreduce_job_output_key_class", "org.apache.hadoop.io.Text");
+ expect("mapreduce_job_output_value_class", "org.apache.hadoop.io.Text");
- expect("mapred_task_is_map", "true");
- expectDefined("mapred_task_id");
+ expect("mapreduce_task_ismap", "true");
+ expectDefined("mapreduce_task_attempt_id");
- expectDefined("map_input_file");
- expectDefined("map_input_length");
+ expectDefined("mapreduce_map_input_file");
+ expectDefined("mapreduce_map_input_length");
- expectDefined("io_sort_factor");
+ expectDefined("mapreduce_task_io_sort_factor");
// the FileSplit context properties are not available in local hadoop..
// so can't check them in this test.
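A hedged sketch of the dot-to-underscore escaping the comment above refers to;
the real implementation is PipeMapRed.safeEnvVarName(), so treat this as an
illustration of its observed effect rather than a copy:

    // Illustrative only: any character that is not a letter or digit becomes
    // '_', so "mapreduce.task.ismap" is exposed as "mapreduce_task_ismap".
    static String safeEnvVarName(String var) {
      StringBuilder safe = new StringBuilder();
      for (char c : var.toCharArray()) {
        safe.append(Character.isLetterOrDigit(c) ? c : '_');
      }
      return safe.toString();
    }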
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java Sat Nov 28 20:26:01 2009
@@ -41,17 +41,17 @@
// test that some JobConf properties are exposed as expected
// Note the dots translated to underscore:
// property names have been escaped in PipeMapRed.safeEnvVarName()
- expect("mapred_job_tracker", "local");
+ expect("mapreduce_jobtracker_address", "local");
//expect("mapred_local_dir", "build/test/mapred/local");
- expectDefined("mapred_local_dir");
+ expectDefined("mapreduce_cluster_local_dir");
expect("mapred_output_format_class", "org.apache.hadoop.mapred.TextOutputFormat");
- expect("mapred_output_key_class", "org.apache.hadoop.io.Text");
- expect("mapred_output_value_class", "org.apache.hadoop.io.Text");
+ expect("mapreduce_job_output_key_class", "org.apache.hadoop.io.Text");
+ expect("mapreduce_job_output_value_class", "org.apache.hadoop.io.Text");
- expect("mapred_task_is_map", "false");
- expectDefined("mapred_task_id");
+ expect("mapreduce_task_ismap", "false");
+ expectDefined("mapreduce_task_attempt_id");
- expectDefined("io_sort_factor");
+ expectDefined("mapreduce_task_io_sort_factor");
// the FileSplit context properties are not available in local hadoop..
// so can't check them in this test.
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java Sat Nov 28 20:26:01 2009
@@ -27,6 +27,7 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -120,6 +121,24 @@
istream.close();
}
+ public void testCustomTypesIO() throws IOException {
+ byte[] rawBytes = new byte[] { 100, 0, 0, 0, 3, 1, 2, 3 };
+
+ FileOutputStream ostream = new FileOutputStream(tmpfile);
+ DataOutputStream dostream = new DataOutputStream(ostream);
+ TypedBytesOutput out = new TypedBytesOutput(dostream);
+ out.writeRaw(rawBytes);
+ dostream.close();
+ ostream.close();
+
+ FileInputStream istream = new FileInputStream(tmpfile);
+ DataInputStream distream = new DataInputStream(istream);
+ TypedBytesInput in = new TypedBytesInput(distream);
+ assertTrue(Arrays.equals(rawBytes, in.readRaw()));
+ distream.close();
+ istream.close();
+ }
+
public void testRecordIO() throws IOException {
RecRecord1 r1 = new RecRecord1();
r1.setBoolVal(true);
Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 20:26:01 2009
@@ -1,3 +1,3 @@
/hadoop/core/branches/branch-0.19/mapred/src/contrib/vaidya:713112
/hadoop/core/trunk/src/contrib/vaidya:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/vaidya:804974-807678
+/hadoop/mapreduce/trunk/src/contrib/vaidya:804974-884916
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/ivy.xml?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/ivy.xml (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/ivy.xml Sat Nov 28 20:26:01 2009
@@ -24,13 +24,8 @@
<artifact conf="master"/>
</publications>
<dependencies>
- <dependency org="commons-logging"
- name="commons-logging"
- rev="${commons-logging.version}"
- conf="common->default"/>
- <dependency org="log4j"
- name="log4j"
- rev="${log4j.version}"
- conf="common->master"/>
+ <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop-core.version}" conf="common->default"/>
+ <dependency org="log4j" name="log4j" rev="${log4j.version}" conf="common->master"/>
+ <dependency org="commons-logging" name="commons-logging" rev="${commons-logging.version}" conf="common->default"/>
</dependencies>
</ivy-module>
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java Sat Nov 28 20:26:01 2009
@@ -19,20 +19,21 @@
package org.apache.hadoop.vaidya.postexdiagnosis;
-import java.net.URL;
-import java.io.InputStream;
import java.io.FileInputStream;
+import java.io.InputStream;
+import java.net.URL;
+
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobHistory.JobInfo;
-import org.apache.hadoop.mapred.DefaultJobHistoryParser;
-import org.apache.hadoop.vaidya.util.XMLUtils;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.vaidya.DiagnosticTest;
import org.apache.hadoop.vaidya.JobDiagnoser;
import org.apache.hadoop.vaidya.statistics.job.JobStatistics;
-import org.w3c.dom.NodeList;
+import org.apache.hadoop.vaidya.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
/**
@@ -108,15 +109,14 @@
* Read the job information necessary for post performance analysis
*/
JobConf jobConf = new JobConf();
- JobInfo jobInfo = new JobInfo("");
- readJobInformation(jobConf, jobInfo);
+ JobInfo jobInfo = readJobInformation(jobConf);
this._jobExecutionStatistics = new JobStatistics(jobConf, jobInfo);
}
/**
* read and populate job statistics information.
*/
- private void readJobInformation(JobConf jobConf, JobInfo jobInfo) throws Exception {
+ private JobInfo readJobInformation(JobConf jobConf) throws Exception {
/*
* Convert the input strings to URL
@@ -132,13 +132,21 @@
/*
* Read JobHistoryFile and build job counters to evaluate diagnostic rules
*/
+ JobHistoryParser parser;
+ JobInfo jobInfo;
if (jobHistoryFileUrl.getProtocol().equals("hdfs")) {
- DefaultJobHistoryParser.parseJobTasks (jobHistoryFileUrl.getPath(), jobInfo, FileSystem.get(jobConf));
+ parser = new JobHistoryParser(FileSystem.get(jobConf),
+ jobHistoryFileUrl.getPath());
+ jobInfo = parser.parse();
} else if (jobHistoryFileUrl.getProtocol().equals("file")) {
- DefaultJobHistoryParser.parseJobTasks (jobHistoryFileUrl.getPath(), jobInfo, FileSystem.getLocal(jobConf));
+ parser = new JobHistoryParser(FileSystem.getLocal(jobConf),
+ jobHistoryFileUrl.getPath());
+ jobInfo = parser.parse();
} else {
- throw new Exception("Malformed URL. Protocol: "+jobHistoryFileUrl.getProtocol());
+ throw new Exception("Malformed URL. Protocol: "+
+ jobHistoryFileUrl.getProtocol());
}
+ return jobInfo;
}
/*
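For anyone tracking the API migration in this hunk: the removed DefaultJobHistoryParser.parseJobTasks(path, jobInfo, fs) mutated a caller-supplied JobInfo, whereas the new JobHistoryParser is constructed from a FileSystem plus a history-file path and hands back the parsed JobInfo. A minimal usage sketch, assuming a local history file (the path is made up for illustration):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

public class ParseHistorySketch {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf();
    // Illustrative path; point this at a real job history file.
    JobHistoryParser parser = new JobHistoryParser(
        FileSystem.getLocal(conf), "/tmp/history/job_0001_hist");
    JobInfo jobInfo = parser.parse();
    System.out.println(jobInfo.getJobname() + " finished at "
        + jobInfo.getFinishTime());
  }
}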
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java Sat Nov 28 20:26:01 2009
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.vaidya.postexdiagnosis.tests;
+import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.vaidya.statistics.job.JobStatistics;
import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.JobKeys;
import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.KeyDataType;
@@ -98,9 +99,14 @@
public String getPrescription() {
return
"* Use combiner to lower the map output size.\n" +
- "* Increase map side sort buffer size (io.sort.mb:"+this._job.getJobConf().getInt("io.sort.mb", 0) + ").\n" +
- "* Increase index buffer size (io.sort.record.percent:"+ this._job.getJobConf().getInt("io.sort.record.percent", 0) + ") if number of Map Output Records are large. \n" +
- "* Increase (io.sort.spill.percent:"+ this._job.getJobConf().getInt("io.sort.spill.percent", 0) + "), default 0.80 i.e. 80% of sort buffer size and index buffer size. \n";
+ "* Increase map side sort buffer size (" + JobContext.IO_SORT_FACTOR +
+ ":" + this._job.getJobConf().getInt(JobContext.IO_SORT_MB, 0) + ").\n" +
+ "* Increase index buffer size (" + JobContext.MAP_SORT_RECORD_PERCENT +
+ ":" + this._job.getJobConf().getInt(JobContext.MAP_SORT_RECORD_PERCENT, 0)
+ + ") if number of Map Output Records are large. \n" +
+ "* Increase (" + JobContext.MAP_SORT_SPILL_PERCENT + ":" +
+ this._job.getJobConf().getInt(JobContext.MAP_SORT_SPILL_PERCENT, 0) +
+ "), default 0.80 i.e. 80% of sort buffer size and index buffer size. \n";
}
/* (non-Javadoc)
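The prescription text above now names its tuning knobs through JobContext constants rather than bare strings. Note that the two percent settings are fractional, so getFloat is the natural accessor (getInt would round a value such as 0.80 down to 0). A hedged sketch of reading the three knobs; the 0.80 default is stated in the prescription itself, while the other two defaults are assumptions from the historical documentation:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobContext;

public class SortKnobsSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Map-side sort buffer size in MB (historically io.sort.mb).
    int sortMb = conf.getInt(JobContext.IO_SORT_MB, 100);
    // Fraction of the buffer reserved for record accounting (assumed default).
    float recordPercent =
        conf.getFloat(JobContext.MAP_SORT_RECORD_PERCENT, 0.05f);
    // Fill fraction at which the background spill to disk starts.
    float spillPercent =
        conf.getFloat(JobContext.MAP_SORT_SPILL_PERCENT, 0.80f);
    System.out.println(sortMb + " / " + recordPercent + " / " + spillPercent);
  }
}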
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java Sat Nov 28 20:26:01 2009
@@ -17,26 +17,23 @@
*/
package org.apache.hadoop.vaidya.statistics.job;
-import java.util.ArrayList;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobHistory;
-import org.apache.hadoop.mapred.JobHistory.JobInfo;
-import org.apache.hadoop.mapred.JobHistory.Keys;
-import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.Counters.Counter;
import java.text.ParseException;
-
-//import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.JobKeys;
-
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
import java.util.Hashtable;
import java.util.Map;
import java.util.regex.Pattern;
-import java.util.regex.Matcher;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Collections;
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapred.TaskStatus;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
/**
*
@@ -65,7 +62,7 @@
/*
* Aggregated Job level counters
*/
- private JobHistory.JobInfo _jobInfo;
+ private JobHistoryParser.JobInfo _jobInfo;
/*
* Job stats
@@ -152,8 +149,9 @@
this._jobConf = jobConf;
this._jobInfo = jobInfo;
this._job = new Hashtable<Enum, String>();
- populate_Job(this._job, this._jobInfo.getValues());
- populate_MapReduceTaskLists(this._mapTaskList, this._reduceTaskList, this._jobInfo.getAllTasks());
+ populate_Job(this._job, jobInfo);
+ populate_MapReduceTaskLists(this._mapTaskList, this._reduceTaskList,
+ jobInfo.getAllTasks());
// Add the Job Type: MAP_REDUCE, MAP_ONLY
if (getLongValue(JobKeys.TOTAL_REDUCES) == 0) {
@@ -167,120 +165,100 @@
*
*/
private void populate_MapReduceTaskLists (ArrayList<MapTaskStatistics> mapTaskList,
- ArrayList<ReduceTaskStatistics> reduceTaskList,
- java.util.Map<String, JobHistory.Task> taskMap) throws ParseException {
- /*
- *
- */
+ ArrayList<ReduceTaskStatistics> reduceTaskList,
+ Map<TaskID, TaskInfo> taskMap)
+ throws ParseException {
int num_tasks = taskMap.entrySet().size();
- java.util.Iterator<Map.Entry<String, JobHistory.Task>> ti = taskMap.entrySet().iterator();
- for (int i = 0; i < num_tasks; i++)
- {
- Map.Entry<String, JobHistory.Task> entry = (Map.Entry<String, JobHistory.Task>) ti.next();
- JobHistory.Task task = entry.getValue();
- if (task.get(Keys.TASK_TYPE).equals("MAP")) {
- MapTaskStatistics mapT = new MapTaskStatistics();
- java.util.Map<JobHistory.Keys, String> mapTask = task.getValues();
- java.util.Map<JobHistory.Keys, String> successTaskAttemptMap = getLastSuccessfulTaskAttempt(task);
- // NOTE: Following would lead to less number of actual tasks collected in the tasklist array
- if (successTaskAttemptMap != null) {
- mapTask.putAll(successTaskAttemptMap);
- } else {
- System.err.println("Task:<"+task.get(Keys.TASKID)+"> is not successful - SKIPPING");
- }
- int size = mapTask.size();
- java.util.Iterator<Map.Entry<JobHistory.Keys, String>> kv = mapTask.entrySet().iterator();
- for (int j = 0; j < size; j++)
- {
- Map.Entry<JobHistory.Keys, String> mtc = kv.next();
- JobHistory.Keys key = mtc.getKey();
- String value = mtc.getValue();
- //System.out.println("JobHistory.MapKeys."+key+": "+value);
- switch (key) {
- case TASKID: mapT.setValue(MapTaskKeys.TASK_ID, value); break;
- case TASK_ATTEMPT_ID: mapT.setValue(MapTaskKeys.ATTEMPT_ID, value); break;
- case HOSTNAME: mapT.setValue(MapTaskKeys.HOSTNAME, value); break;
- case TASK_TYPE: mapT.setValue(MapTaskKeys.TASK_TYPE, value); break;
- case TASK_STATUS: mapT.setValue(MapTaskKeys.STATUS, value); break;
- case START_TIME: mapT.setValue(MapTaskKeys.START_TIME, value); break;
- case FINISH_TIME: mapT.setValue(MapTaskKeys.FINISH_TIME, value); break;
- case SPLITS: mapT.setValue(MapTaskKeys.SPLITS, value); break;
- case TRACKER_NAME: mapT.setValue(MapTaskKeys.TRACKER_NAME, value); break;
- case STATE_STRING: mapT.setValue(MapTaskKeys.STATE_STRING, value); break;
- case HTTP_PORT: mapT.setValue(MapTaskKeys.HTTP_PORT, value); break;
- case ERROR: mapT.setValue(MapTaskKeys.ERROR, value); break;
- case COUNTERS:
- value.concat(",");
- parseAndAddMapTaskCounters(mapT, value);
- mapTaskList.add(mapT);
- break;
- default: System.err.println("JobHistory.MapKeys."+key+" : NOT INCLUDED IN PERFORMANCE ADVISOR MAP COUNTERS");
- break;
- }
- }
-
- // Add number of task attempts
- mapT.setValue(MapTaskKeys.NUM_ATTEMPTS, (new Integer(task.getTaskAttempts().size())).toString());
+// TODO: do we need these lists?
+// List<TaskAttemptInfo> successfulMapAttemptList =
+// new ArrayList<TaskAttemptInfo>();
+// List<TaskAttemptInfo> successfulReduceAttemptList =
+// new ArrayList<TaskAttemptInfo>();
+ for (JobHistoryParser.TaskInfo taskInfo: taskMap.values()) {
+ if (taskInfo.getTaskType().equals(TaskType.MAP)) {
+ MapTaskStatistics mapT = new MapTaskStatistics();
+ TaskAttemptInfo successfulAttempt =
+ getLastSuccessfulTaskAttempt(taskInfo);
+ mapT.setValue(MapTaskKeys.TASK_ID,
+ successfulAttempt.getAttemptId().getTaskID().toString());
+ mapT.setValue(MapTaskKeys.ATTEMPT_ID,
+ successfulAttempt.getAttemptId().toString());
+ mapT.setValue(MapTaskKeys.HOSTNAME,
+ successfulAttempt.getTrackerName());
+ mapT.setValue(MapTaskKeys.TASK_TYPE,
+ successfulAttempt.getTaskType().toString());
+ mapT.setValue(MapTaskKeys.STATUS,
+ successfulAttempt.getTaskStatus().toString());
+ mapT.setValue(MapTaskKeys.START_TIME, successfulAttempt.getStartTime());
+ mapT.setValue(MapTaskKeys.FINISH_TIME, successfulAttempt.getFinishTime());
+ mapT.setValue(MapTaskKeys.SPLITS, taskInfo.getSplitLocations());
+ mapT.setValue(MapTaskKeys.TRACKER_NAME, successfulAttempt.getTrackerName());
+ mapT.setValue(MapTaskKeys.STATE_STRING, successfulAttempt.getState());
+ mapT.setValue(MapTaskKeys.HTTP_PORT, successfulAttempt.getHttpPort());
+ mapT.setValue(MapTaskKeys.ERROR, successfulAttempt.getError());
+ parseAndAddMapTaskCounters(mapT,
+ successfulAttempt.getCounters().toString());
+ mapTaskList.add(mapT);
- // Add EXECUTION_TIME = FINISH_TIME - START_TIME
- long etime = mapT.getLongValue(MapTaskKeys.FINISH_TIME) - mapT.getLongValue(MapTaskKeys.START_TIME);
- mapT.setValue(MapTaskKeys.EXECUTION_TIME, (new Long(etime)).toString());
-
- }else if (task.get(Keys.TASK_TYPE).equals("REDUCE")) {
+ // Add number of task attempts
+ mapT.setValue(MapTaskKeys.NUM_ATTEMPTS,
+ (new Integer(taskInfo.getAllTaskAttempts().size())).toString());
+
+ // Add EXECUTION_TIME = FINISH_TIME - START_TIME
+ long etime = mapT.getLongValue(MapTaskKeys.FINISH_TIME) -
+ mapT.getLongValue(MapTaskKeys.START_TIME);
+ mapT.setValue(MapTaskKeys.EXECUTION_TIME, (new Long(etime)).toString());
+
+ } else if (taskInfo.getTaskType().equals(TaskType.REDUCE)) {
ReduceTaskStatistics reduceT = new ReduceTaskStatistics();
- java.util.Map<JobHistory.Keys, String> reduceTask = task.getValues();
- java.util.Map<JobHistory.Keys, String> successTaskAttemptMap = getLastSuccessfulTaskAttempt(task);
- // NOTE: Following would lead to less number of actual tasks collected in the tasklist array
- if (successTaskAttemptMap != null) {
- reduceTask.putAll(successTaskAttemptMap);
- } else {
- System.err.println("Task:<"+task.get(Keys.TASKID)+"> is not successful - SKIPPING");
- }
- int size = reduceTask.size();
- java.util.Iterator<Map.Entry<JobHistory.Keys, String>> kv = reduceTask.entrySet().iterator();
- for (int j = 0; j < size; j++)
- {
- Map.Entry<JobHistory.Keys, String> rtc = kv.next();
- JobHistory.Keys key = rtc.getKey();
- String value = rtc.getValue();
- //System.out.println("JobHistory.ReduceKeys."+key+": "+value);
- switch (key) {
- case TASKID: reduceT.setValue(ReduceTaskKeys.TASK_ID, value); break;
- case TASK_ATTEMPT_ID: reduceT.setValue(ReduceTaskKeys.ATTEMPT_ID, value); break;
- case HOSTNAME: reduceT.setValue(ReduceTaskKeys.HOSTNAME, value); break;
- case TASK_TYPE: reduceT.setValue(ReduceTaskKeys.TASK_TYPE, value); break;
- case TASK_STATUS: reduceT.setValue(ReduceTaskKeys.STATUS, value); break;
- case START_TIME: reduceT.setValue(ReduceTaskKeys.START_TIME, value); break;
- case FINISH_TIME: reduceT.setValue(ReduceTaskKeys.FINISH_TIME, value); break;
- case SHUFFLE_FINISHED: reduceT.setValue(ReduceTaskKeys.SHUFFLE_FINISH_TIME, value); break;
- case SORT_FINISHED: reduceT.setValue(ReduceTaskKeys.SORT_FINISH_TIME, value); break;
- case SPLITS: reduceT.setValue(ReduceTaskKeys.SPLITS, value); break;
- case TRACKER_NAME: reduceT.setValue(ReduceTaskKeys.TRACKER_NAME, value); break;
- case STATE_STRING: reduceT.setValue(ReduceTaskKeys.STATE_STRING, value); break;
- case HTTP_PORT: reduceT.setValue(ReduceTaskKeys.HTTP_PORT, value); break;
- case COUNTERS:
- value.concat(",");
- parseAndAddReduceTaskCounters(reduceT, value);
- reduceTaskList.add(reduceT);
- break;
- default: System.err.println("JobHistory.ReduceKeys."+key+" : NOT INCLUDED IN PERFORMANCE ADVISOR REDUCE COUNTERS");
- break;
- }
- }
+ TaskAttemptInfo successfulAttempt =
+ getLastSuccessfulTaskAttempt(taskInfo);
+ reduceT.setValue(ReduceTaskKeys.TASK_ID,
+ successfulAttempt.getAttemptId().getTaskID().toString());
+ reduceT.setValue(ReduceTaskKeys.ATTEMPT_ID,
+ successfulAttempt.getAttemptId().toString());
+ reduceT.setValue(ReduceTaskKeys.HOSTNAME,
+ successfulAttempt.getTrackerName());
+ reduceT.setValue(ReduceTaskKeys.TASK_TYPE,
+ successfulAttempt.getTaskType().toString());
+ reduceT.setValue(ReduceTaskKeys.STATUS,
+ successfulAttempt.getTaskStatus().toString());
+ reduceT.setValue(ReduceTaskKeys.START_TIME,
+ successfulAttempt.getStartTime());
+ reduceT.setValue(ReduceTaskKeys.FINISH_TIME,
+ successfulAttempt.getFinishTime());
+ reduceT.setValue(ReduceTaskKeys.SHUFFLE_FINISH_TIME,
+ successfulAttempt.getShuffleFinishTime());
+ reduceT.setValue(ReduceTaskKeys.SORT_FINISH_TIME,
+ successfulAttempt.getSortFinishTime());
+ reduceT.setValue(ReduceTaskKeys.SPLITS, "");
+ reduceT.setValue(ReduceTaskKeys.TRACKER_NAME,
+ successfulAttempt.getTrackerName());
+ reduceT.setValue(ReduceTaskKeys.STATE_STRING,
+ successfulAttempt.getState());
+ reduceT.setValue(ReduceTaskKeys.HTTP_PORT,
+ successfulAttempt.getHttpPort());
+ parseAndAddReduceTaskCounters(reduceT,
+ successfulAttempt.getCounters().toString());
+
+ reduceTaskList.add(reduceT);
// Add number of task attempts
- reduceT.setValue(ReduceTaskKeys.NUM_ATTEMPTS, (new Integer(task.getTaskAttempts().size())).toString());
+ reduceT.setValue(ReduceTaskKeys.NUM_ATTEMPTS,
+ (new Integer(taskInfo.getAllTaskAttempts().size())).toString());
// Add EXECUTION_TIME = FINISH_TIME - START_TIME
- long etime1 = reduceT.getLongValue(ReduceTaskKeys.FINISH_TIME) - reduceT.getLongValue(ReduceTaskKeys.START_TIME);
- reduceT.setValue(ReduceTaskKeys.EXECUTION_TIME, (new Long(etime1)).toString());
+ long etime1 = reduceT.getLongValue(ReduceTaskKeys.FINISH_TIME) -
+ reduceT.getLongValue(ReduceTaskKeys.START_TIME);
+ reduceT.setValue(ReduceTaskKeys.EXECUTION_TIME,
+ (new Long(etime1)).toString());
- } else if (task.get(Keys.TASK_TYPE).equals("CLEANUP") ||
- task.get(Keys.TASK_TYPE).equals("SETUP")) {
+ } else if (taskInfo.getTaskType().equals(TaskType.JOB_CLEANUP) ||
+ taskInfo.getTaskType().equals(TaskType.JOB_SETUP)) {
//System.out.println("INFO: IGNORING TASK TYPE : "+task.get(Keys.TASK_TYPE));
} else {
- System.err.println("UNKNOWN TASK TYPE : "+task.get(Keys.TASK_TYPE));
+ System.err.println("UNKNOWN TASK TYPE : "+taskInfo.getTaskType());
}
}
}
@@ -288,62 +266,40 @@
/*
* Get last successful task attempt to be added in the stats
*/
- private java.util.Map<JobHistory.Keys, String> getLastSuccessfulTaskAttempt(JobHistory.Task task) {
+ private TaskAttemptInfo getLastSuccessfulTaskAttempt(TaskInfo task) {
- Map<String, JobHistory.TaskAttempt> taskAttempts = task.getTaskAttempts();
- int size = taskAttempts.size();
- java.util.Iterator<Map.Entry<String, JobHistory.TaskAttempt>> kv = taskAttempts.entrySet().iterator();
- for (int i=0; i<size; i++) {
- // CHECK_IT: Only one SUCCESSFUL TASK ATTEMPT
- Map.Entry<String, JobHistory.TaskAttempt> tae = kv.next();
- JobHistory.TaskAttempt attempt = tae.getValue();
- if (attempt.getValues().get(JobHistory.Keys.TASK_STATUS).equals("SUCCESS")) {
- return attempt.getValues();
+ for (TaskAttemptInfo ai: task.getAllTaskAttempts().values()) {
+ if (ai.getTaskStatus().equals(TaskStatus.State.SUCCEEDED.toString())) {
+ return ai;
}
}
-
return null;
}
/*
* Populate the job stats
*/
- private void populate_Job (Hashtable<Enum, String> job, java.util.Map<JobHistory.Keys, String> jobC) throws ParseException {
- int size = jobC.size();
- java.util.Iterator<Map.Entry<JobHistory.Keys, String>> kv = jobC.entrySet().iterator();
- for (int i = 0; i < size; i++)
- {
- Map.Entry<JobHistory.Keys, String> entry = (Map.Entry<JobHistory.Keys, String>) kv.next();
- JobHistory.Keys key = entry.getKey();
- String value = entry.getValue();
- //System.out.println("JobHistory.JobKeys."+key+": "+value);
- switch (key) {
- case JOBTRACKERID: job.put(JobKeys.JOBTRACKERID, value); break;
- case FINISH_TIME: job.put(JobKeys.FINISH_TIME, value); break;
- case JOBID: job.put(JobKeys.JOBID, value); break;
- case JOBNAME: job.put(JobKeys.JOBNAME, value); break;
- case USER: job.put(JobKeys.USER, value); break;
- case JOBCONF: job.put(JobKeys.JOBCONF, value); break;
- case SUBMIT_TIME: job.put(JobKeys.SUBMIT_TIME, value); break;
- case LAUNCH_TIME: job.put(JobKeys.LAUNCH_TIME, value); break;
- case TOTAL_MAPS: job.put(JobKeys.TOTAL_MAPS, value); break;
- case TOTAL_REDUCES: job.put(JobKeys.TOTAL_REDUCES, value); break;
- case FAILED_MAPS: job.put(JobKeys.FAILED_MAPS, value); break;
- case FAILED_REDUCES: job.put(JobKeys.FAILED_REDUCES, value); break;
- case FINISHED_MAPS: job.put(JobKeys.FINISHED_MAPS, value); break;
- case FINISHED_REDUCES: job.put(JobKeys.FINISHED_REDUCES, value); break;
- case JOB_STATUS: job.put(JobKeys.STATUS, value); break;
- case JOB_PRIORITY: job.put(JobKeys.JOB_PRIORITY, value); break;
- case COUNTERS:
- value.concat(",");
- parseAndAddJobCounters(job, value);
- break;
- default: System.err.println("JobHistory.Keys."+key+" : NOT INCLUDED IN PERFORMANCE ADVISOR COUNTERS");
- break;
- }
- }
+ private void populate_Job (Hashtable<Enum, String> job, JobInfo jobInfo) throws ParseException {
+ job.put(JobKeys.FINISH_TIME, String.valueOf(jobInfo.getFinishTime()));
+ job.put(JobKeys.JOBID, jobInfo.getJobId().toString());
+ job.put(JobKeys.JOBNAME, jobInfo.getJobname());
+ job.put(JobKeys.USER, jobInfo.getUsername());
+ job.put(JobKeys.JOBCONF, jobInfo.getJobConfPath());
+ job.put(JobKeys.SUBMIT_TIME, String.valueOf(jobInfo.getSubmitTime()));
+ job.put(JobKeys.LAUNCH_TIME, String.valueOf(jobInfo.getLaunchTime()));
+ job.put(JobKeys.TOTAL_MAPS, String.valueOf(jobInfo.getTotalMaps()));
+ job.put(JobKeys.TOTAL_REDUCES, String.valueOf(jobInfo.getTotalReduces()));
+ job.put(JobKeys.FAILED_MAPS, String.valueOf(jobInfo.getFailedMaps()));
+ job.put(JobKeys.FAILED_REDUCES, String.valueOf(jobInfo.getFailedReduces()));
+ job.put(JobKeys.FINISHED_MAPS, String.valueOf(jobInfo.getFinishedMaps()));
+ job.put(JobKeys.FINISHED_REDUCES,
+ String.valueOf(jobInfo.getFinishedReduces()));
+ job.put(JobKeys.STATUS, jobInfo.getJobStatus().toString());
+ job.put(JobKeys.JOB_PRIORITY, jobInfo.getPriority());
+ parseAndAddJobCounters(job, jobInfo.getTotalCounters().toString());
}
+
/*
* Parse and add the job counters
*/
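One behavioral note on this rewrite: getLastSuccessfulTaskAttempt() now returns a TaskAttemptInfo, or null when no attempt succeeded, yet the new populate_MapReduceTaskLists dereferences the result directly, where the old code logged and skipped such tasks. A minimal sketch of the selection with that guard restored (the guard is a suggestion, not part of this commit; the class name is illustrative):

import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

class SuccessfulAttemptSketch {
  // Mirrors getLastSuccessfulTaskAttempt(): return the first attempt whose
  // status is SUCCEEDED, or null if the task never succeeded.
  static TaskAttemptInfo find(TaskInfo task) {
    for (TaskAttemptInfo attempt : task.getAllTaskAttempts().values()) {
      if (attempt.getTaskStatus().equals(
          TaskStatus.State.SUCCEEDED.toString())) {
        return attempt;
      }
    }
    return null;
  }

  static void copyStats(TaskInfo task) {
    TaskAttemptInfo attempt = find(task);
    if (attempt == null) {
      // Pre-rewrite behavior: log and skip rather than risk an NPE below.
      System.err.println("Task has no successful attempt - SKIPPING");
      return;
    }
    System.out.println(attempt.getAttemptId());  // safe to dereference now
  }
}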
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh Sat Nov 28 20:26:01 2009
@@ -42,6 +42,6 @@
exit -1;
fi
-hadoopVersion=`$HADOOP_HOME/bin/hadoop version | awk 'BEGIN { RS = "" ; FS = "\n" } ; { print $1 }' | awk '{print $2}'`
+hadoopVersion=`$HADOOP_HOME/bin/hadoop version | grep Hadoop | awk '{print $2}'`
$JAVA_HOME/bin/java -Xmx1024m -classpath $HADOOP_HOME/hadoop-${hadoopVersion}-core.jar:$HADOOP_HOME/contrib/vaidya/hadoop-${hadoopVersion}-vaidya.jar:$HADOOP_HOME/lib/commons-logging-1.0.4.jar:${CLASSPATH} org.apache.hadoop.vaidya.postexdiagnosis.PostExPerformanceDiagnoser $@