Posted to mapreduce-commits@hadoop.apache.org by om...@apache.org on 2010/05/01 00:26:21 UTC

svn commit: r939849 [1/3] - in /hadoop/mapreduce/trunk: ./ src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/ src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/jo...

Author: omalley
Date: Fri Apr 30 22:26:19 2010
New Revision: 939849

URL: http://svn.apache.org/viewvc?rev=939849&view=rev
Log:
MAPREDUCE-1749. Move configuration strings out of JobContext so that it
can be made public stable. (omalley)
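
Illustrative migration sketch (not part of this patch; it uses only MRJobConfig
constants that appear in the diffs below). Driver code that read the keys off
JobContext now reads them off MRJobConfig, with no change to the key strings
themselves:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class MigrationSketch {
      public static Job createJob() throws IOException {
        Configuration conf = new Configuration();
        // Formerly: conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, true);
        conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
        // Formerly: conf.setInt(JobContext.NUM_REDUCES, 10);
        conf.setInt(MRJobConfig.NUM_REDUCES, 10);
        return new Job(conf, "example");
      }
    }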

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java
    hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java
    hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java
    hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
    hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomTextWriter.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/pi/DistSum.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraInputFormat.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/AdminOperationsProtocol.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTask.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/UserLogCleaner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultipleOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/db/DBOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Application.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesMapRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobACL.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobContext.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/TrackerDistributedCacheManager.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorBaseDescriptor.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorJob.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/BigDecimalSplitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/DateSplitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/FloatSplitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/IntegerSplitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/TextSplitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedComparator.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/TokenCache.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleScheduler.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/conf/TestJobConf.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MRCaching.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLostTracker.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupWorkDir.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestUserLogCleanup.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/GenericMRLoadGenerator.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/SleepJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestJobACLs.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/security/TestTokenCache.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Fri Apr 30 22:26:19 2010
@@ -281,6 +281,9 @@ Trunk (unreleased changes)
     MAPREDUCE-1568. TrackerDistributedCacheManager should clean up cache
     in a background thread. (Scott Chen via zshao)
 
+    MAPREDUCE-1749. Move configuration strings out of JobContext so that it
+    can be made public stable. (omalley)
+
   OPTIMIZATIONS
 
     MAPREDUCE-270. Fix the tasktracker to optionally send an out-of-band

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java Fri Apr 30 22:26:19 2010
@@ -58,7 +58,7 @@ public class CombinerJobCreator {
         return null;
       }
     }
-    conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
+    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
     conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
 
     Job job = new Job(conf);

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java Fri Apr 30 22:26:19 2010
@@ -92,7 +92,7 @@ public class GenericMRLoadJobCreator ext
       }
     }
 
-    conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
+    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
     conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
     return job;
   }

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java Fri Apr 30 22:26:19 2010
@@ -127,7 +127,7 @@ public class GridMixRunner {
       try {
         Configuration conf = StreamJob.createJob(args);
         conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
-        conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
+        conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
         Job job = new Job(conf, "GridmixStreamingSorter." + size);
         ControlledJob cjob = new ControlledJob(job, null);
         gridmix.addJob(cjob);
@@ -150,7 +150,7 @@ public class GridMixRunner {
       try {
         Configuration conf = new Configuration();
         conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
-        conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
+        conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
         Job job = new Job(conf);
         job.setJarByClass(Sort.class);
         job.setJobName("GridmixJavaSorter." + size);

Modified: hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java Fri Apr 30 22:26:19 2010
@@ -21,7 +21,7 @@ package org.apache.hadoop.mapred;
 import java.util.Properties;
 
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.mapreduce.SleepJob;
@@ -107,7 +107,7 @@ public class TestCapacitySchedulerWithJo
 
     JobConf conf = getJobConf();
     conf.setSpeculativeExecution(false);
-    conf.set(JobContext.SETUP_CLEANUP_NEEDED, "false");
+    conf.set(MRJobConfig.SETUP_CLEANUP_NEEDED, "false");
     conf.setNumTasksToExecutePerJvm(-1);
     conf.setQueueName(queues[0]);
     SleepJob sleepJob1 = new SleepJob();

Modified: hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java Fri Apr 30 22:26:19 2010
@@ -25,7 +25,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 /**
  * This abstract class serves as the base class for the mapper class of a data
@@ -56,7 +56,7 @@ public abstract class DataJoinMapperBase
   public void configure(JobConf job) {
     super.configure(job);
     this.job = job;
-    this.inputFile = job.get(JobContext.MAP_INPUT_FILE);
+    this.inputFile = job.get(MRJobConfig.MAP_INPUT_FILE);
     this.inputTag = generateInputTag(this.inputFile);
   }
 

Modified: hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java Fri Apr 30 22:26:19 2010
@@ -24,7 +24,7 @@ import org.apache.hadoop.contrib.index.e
 import org.apache.hadoop.contrib.index.example.LineDocLocalAnalysis;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 
@@ -64,7 +64,7 @@ public class IndexUpdateConfiguration {
    * @return the IO sort space in MB
    */
   public int getIOSortMB() {
-    return conf.getInt(JobContext.IO_SORT_MB, 100);
+    return conf.getInt(MRJobConfig.IO_SORT_MB, 100);
   }
 
   /**
@@ -72,7 +72,7 @@ public class IndexUpdateConfiguration {
    * @param mb  the IO sort space in MB
    */
   public void setIOSortMB(int mb) {
-    conf.setInt(JobContext.IO_SORT_MB, mb);
+    conf.setInt(MRJobConfig.IO_SORT_MB, mb);
   }
 
   /**

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java Fri Apr 30 22:26:19 2010
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.streaming.io.InputWriter;
 import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.streaming.io.TextInputWriter;
@@ -69,7 +69,7 @@ public class PipeMapper extends PipeMapR
     //processed records could be different(equal or less) than the no of 
     //records input.
     SkipBadRecords.setAutoIncrMapperProcCount(job, false);
-    skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
+    skipping = job.getBoolean(MRJobConfig.SKIP_RECORDS, false);
     if (mapInputWriterClass_.getCanonicalName().equals(TextInputWriter.class.getCanonicalName())) {
       String inputFormatClassName = job.getClass("mapred.input.format.class", TextInputFormat.class).getCanonicalName();
       ignoreKey = inputFormatClassName.equals(TextInputFormat.class.getCanonicalName());

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java Fri Apr 30 22:26:19 2010
@@ -28,7 +28,7 @@ import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.SkipBadRecords;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.streaming.io.InputWriter;
 import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.util.StringUtils;
@@ -70,7 +70,7 @@ public class PipeReducer extends PipeMap
     //processed records could be different(equal or less) than the no of 
     //records input.
     SkipBadRecords.setAutoIncrReducerProcCount(job, false);
-    skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
+    skipping = job.getBoolean(MRJobConfig.SKIP_RECORDS, false);
 
     try {
       reduceOutFieldSeparator = job_.get("stream.reduce.output.field.separator", "\t").getBytes("UTF-8");

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Fri Apr 30 22:26:19 2010
@@ -45,7 +45,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.fs.Path;
@@ -271,14 +271,14 @@ public class StreamJob implements Tool {
       values = cmdLine.getOptionValues("file");
       if (values != null && values.length > 0) {
         StringBuilder unpackRegex = new StringBuilder(
-          config_.getPattern(JobContext.JAR_UNPACK_PATTERN,
+          config_.getPattern(MRJobConfig.JAR_UNPACK_PATTERN,
                              JobConf.UNPACK_JAR_PATTERN_DEFAULT).pattern());
         for (String file : values) {
           packageFiles_.add(file);
           String fname = new File(file).getName();
           unpackRegex.append("|(?:").append(Pattern.quote(fname)).append(")");
         }
-        config_.setPattern(JobContext.JAR_UNPACK_PATTERN,
+        config_.setPattern(MRJobConfig.JAR_UNPACK_PATTERN,
                            Pattern.compile(unpackRegex.toString()));
         validate(packageFiles_);
       }
@@ -511,7 +511,7 @@ public class StreamJob implements Tool {
     System.out.println("  The location of this working directory is unspecified.");
     System.out.println();
     System.out.println("To set the number of reduce tasks (num. of output files):");
-    System.out.println("  -D " + JobContext.NUM_REDUCES + "=10");
+    System.out.println("  -D " + MRJobConfig.NUM_REDUCES + "=10");
     System.out.println("To skip the sort/combine/shuffle/sort/reduce step:");
     System.out.println("  Use -numReduceTasks 0");
     System.out
@@ -522,11 +522,11 @@ public class StreamJob implements Tool {
     System.out.println("  This equivalent -reducer NONE");
     System.out.println();
     System.out.println("To speed up the last maps:");
-    System.out.println("  -D " + JobContext.MAP_SPECULATIVE + "=true");
+    System.out.println("  -D " + MRJobConfig.MAP_SPECULATIVE + "=true");
     System.out.println("To speed up the last reduces:");
-    System.out.println("  -D " + JobContext.REDUCE_SPECULATIVE + "=true");
+    System.out.println("  -D " + MRJobConfig.REDUCE_SPECULATIVE + "=true");
     System.out.println("To name the job (appears in the JobTracker Web UI):");
-    System.out.println("  -D " + JobContext.JOB_NAME + "='My Job'");
+    System.out.println("  -D " + MRJobConfig.JOB_NAME + "='My Job'");
     System.out.println("To change the local temp directory:");
     System.out.println("  -D dfs.data.dir=/tmp/dfs");
     System.out.println("  -D stream.tmpdir=/tmp/streaming");

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java Fri Apr 30 22:26:19 2010
@@ -36,7 +36,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /** Utilities not available elsewhere in Hadoop.
@@ -422,13 +422,13 @@ public class StreamUtil {
   // JobConf helpers
 
   public static FileSplit getCurrentSplit(JobConf job) {
-    String path = job.get(JobContext.MAP_INPUT_FILE);
+    String path = job.get(MRJobConfig.MAP_INPUT_FILE);
     if (path == null) {
       return null;
     }
     Path p = new Path(path);
-    long start = Long.parseLong(job.get(JobContext.MAP_INPUT_START));
-    long length = Long.parseLong(job.get(JobContext.MAP_INPUT_PATH));
+    long start = Long.parseLong(job.get(MRJobConfig.MAP_INPUT_START));
+    long length = Long.parseLong(job.get(MRJobConfig.MAP_INPUT_PATH));
     return new FileSplit(p, start, length, job);
   }
 
@@ -447,10 +447,10 @@ public class StreamUtil {
   public static TaskId getTaskInfo(JobConf job) {
     TaskId res = new TaskId();
 
-    String id = job.get(JobContext.TASK_ATTEMPT_ID);
+    String id = job.get(MRJobConfig.TASK_ATTEMPT_ID);
     if (isLocalJobTracker(job)) {
       // it uses difft naming 
-      res.mapTask = job.getBoolean(JobContext.TASK_ISMAP, true);
+      res.mapTask = job.getBoolean(MRJobConfig.TASK_ISMAP, true);
       res.jobid = "0";
       res.taskid = 0;
       res.execid = 0;

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java Fri Apr 30 22:26:19 2010
@@ -23,7 +23,7 @@ import static org.junit.Assert.*;
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
@@ -65,7 +65,7 @@ public class TestStreamAggregate
       "-reducer", "aggregate",
       //"-verbose",
       //"-jobconf", "stream.debug=set"
-      "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true",
+      "-jobconf", MRJobConfig.PRESERVE_FAILED_TASK_FILES + "=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java Fri Apr 30 22:26:19 2010
@@ -24,7 +24,7 @@ import static org.junit.Assert.*;
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
@@ -71,7 +71,7 @@ public class TestStreamingKeyValue
       "-input", INPUT_FILE.getAbsolutePath(),
       "-output", OUTPUT_DIR.getAbsolutePath(),
       "-mapper", "cat",
-      "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true", 
+      "-jobconf", MRJobConfig.PRESERVE_FAILED_TASK_FILES + "=true", 
       "-jobconf", "stream.non.zero.exit.is.failure=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java Fri Apr 30 22:26:19 2010
@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.TaskReport;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 
@@ -53,9 +53,9 @@ public class TestStreamingStatus {
       "-input", INPUT_FILE,
       "-output", OUTPUT_DIR,
       "-mapper", map,
-      "-jobconf", JobContext.NUM_MAPS + "=1",
-      "-jobconf", JobContext.NUM_REDUCES + "=0",      
-      "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true",
+      "-jobconf", MRJobConfig.NUM_MAPS + "=1",
+      "-jobconf", MRJobConfig.NUM_REDUCES + "=0",      
+      "-jobconf", MRJobConfig.PRESERVE_FAILED_TASK_FILES + "=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
       "-jobconf", JTConfig.JT_IPC_ADDRESS + "=localhost:"+jobtrackerPort,
       "-jobconf", "fs.default.name=file:///"

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java Fri Apr 30 22:26:19 2010
@@ -27,7 +27,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.TestMiniMRWithDFS;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.StringUtils;
 
@@ -58,7 +58,7 @@ public class TestUlimit {
       "-mapper", map,
       "-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
       "-numReduceTasks", "0",
-      "-jobconf", JobContext.NUM_MAPS + "=1",
+      "-jobconf", MRJobConfig.NUM_MAPS + "=1",
       "-jobconf", JobConf.MAPRED_MAP_TASK_ULIMIT + "=" + memLimit,
       "-jobconf", JTConfig.JT_IPC_ADDRESS + "=localhost:" +
                                            mr.getJobTrackerPort(),

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java Fri Apr 30 22:26:19 2010
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.vaidya.postexdiagnosis.tests;
 
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.vaidya.statistics.job.JobStatistics;
 import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.JobKeys;
 import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.KeyDataType;
@@ -99,11 +99,11 @@ public class MapSideDiskSpill extends Di
   public String getPrescription() {
     return 
     "* Use combiner to lower the map output size.\n" +
-      "* Increase map side sort buffer size (" + JobContext.IO_SORT_MB + 
-      ":" + this._job.getJobConf().getInt(JobContext.IO_SORT_MB, 0) + ").\n" +
+      "* Increase map side sort buffer size (" + MRJobConfig.IO_SORT_MB + 
+      ":" + this._job.getJobConf().getInt(MRJobConfig.IO_SORT_MB, 0) + ").\n" +
       ") if number of Map Output Records are large. \n" +
-      "* Increase (" + JobContext.MAP_SORT_SPILL_PERCENT + ":" + 
-      this._job.getJobConf().getInt(JobContext.MAP_SORT_SPILL_PERCENT, 0) + 
+      "* Increase (" + MRJobConfig.MAP_SORT_SPILL_PERCENT + ":" + 
+      this._job.getJobConf().getInt(MRJobConfig.MAP_SORT_SPILL_PERCENT, 0) + 
       "), default 0.80 i.e. 80% of sort buffer size. \n";
   }
 

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/BaileyBorweinPlouffe.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/BaileyBorweinPlouffe.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/BaileyBorweinPlouffe.java Fri Apr 30 22:26:19 2010
@@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.Input
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.Reducer;
@@ -328,11 +329,11 @@ public class BaileyBorweinPlouffe extend
     job.setInputFormatClass(BbpInputFormat.class);
 
     // disable task timeout
-    jobconf.setLong(JobContext.TASK_TIMEOUT, 0);
+    jobconf.setLong(MRJobConfig.TASK_TIMEOUT, 0);
 
     // do not use speculative execution
-    jobconf.setBoolean(JobContext.MAP_SPECULATIVE, false);
-    jobconf.setBoolean(JobContext.REDUCE_SPECULATIVE, false);
+    jobconf.setBoolean(MRJobConfig.MAP_SPECULATIVE, false);
+    jobconf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, false);
     return job;
   }
 

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomTextWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomTextWriter.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomTextWriter.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomTextWriter.java Fri Apr 30 22:26:19 2010
@@ -193,7 +193,7 @@ public class RandomTextWriter extends Co
       numMaps = 1;
       conf.setLong(BYTES_PER_MAP, totalBytesToWrite);
     }
-    conf.setInt(JobContext.NUM_MAPS, numMaps);
+    conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
     
     Job job = new Job(conf);
     

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java Fri Apr 30 22:26:19 2010
@@ -106,7 +106,7 @@ public class RandomWriter extends Config
       List<InputSplit> result = new ArrayList<InputSplit>();
       Path outDir = FileOutputFormat.getOutputPath(job);
       int numSplits = 
-            job.getConfiguration().getInt(JobContext.NUM_MAPS, 1);
+            job.getConfiguration().getInt(MRJobConfig.NUM_MAPS, 1);
       for(int i=0; i < numSplits; ++i) {
         result.add(new FileSplit(new Path(outDir, "dummy-split-" + i), 0, 1, 
                                   (String[])null));
@@ -259,7 +259,7 @@ public class RandomWriter extends Config
       numMaps = 1;
       conf.setLong(BYTES_PER_MAP, totalBytesToWrite);
     }
-    conf.setInt(JobContext.NUM_MAPS, numMaps);
+    conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
 
     Job job = new Job(conf);
     

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java Fri Apr 30 22:26:19 2010
@@ -177,7 +177,7 @@ public class DistributedPentomino extend
     int depth = conf.getInt(Pentomino.DEPTH, PENT_DEPTH);
     Class<? extends Pentomino> pentClass = conf.getClass(Pentomino.CLASS, 
       OneSidedPentomino.class, Pentomino.class);
-    int numMaps = conf.getInt(JobContext.NUM_MAPS, DEFAULT_MAPS);
+    int numMaps = conf.getInt(MRJobConfig.NUM_MAPS, DEFAULT_MAPS);
     Path output = new Path(args[0]);
     Path input = new Path(output + "_input");
     FileSystem fileSys = FileSystem.get(conf);

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/pi/DistSum.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/pi/DistSum.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/pi/DistSum.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/pi/DistSum.java Fri Apr 30 22:26:19 2010
@@ -45,6 +45,7 @@ import org.apache.hadoop.mapreduce.Input
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Partitioner;
 import org.apache.hadoop.mapreduce.RecordReader;
@@ -435,10 +436,10 @@ public final class DistSum extends Confi
     SummationWritable.write(sigma, DistSum.class, jobconf);
 
     // disable task timeout
-    jobconf.setLong(JobContext.TASK_TIMEOUT, 0);
+    jobconf.setLong(MRJobConfig.TASK_TIMEOUT, 0);
     // do not use speculative execution
-    jobconf.setBoolean(JobContext.MAP_SPECULATIVE, false);
-    jobconf.setBoolean(JobContext.REDUCE_SPECULATIVE, false);
+    jobconf.setBoolean(MRJobConfig.MAP_SPECULATIVE, false);
+    jobconf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, false);
 
     return job; 
   }

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java Fri Apr 30 22:26:19 2010
@@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.Input
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
@@ -172,7 +173,7 @@ public class TeraGen extends Configured 
      */
     public List<InputSplit> getSplits(JobContext job) {
       long totalRows = getNumberOfRows(job);
-      int numSplits = job.getConfiguration().getInt(JobContext.NUM_MAPS, 1);
+      int numSplits = job.getConfiguration().getInt(MRJobConfig.NUM_MAPS, 1);
       LOG.info("Generating " + totalRows + " using " + numSplits);
       List<InputSplit> splits = new ArrayList<InputSplit>();
       long currentRow = 0;

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraInputFormat.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraInputFormat.java Fri Apr 30 22:26:19 2010
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -56,7 +57,7 @@ public class TeraInputFormat extends Fil
   static final int KEY_LENGTH = 10;
   static final int VALUE_LENGTH = 90;
   static final int RECORD_LENGTH = KEY_LENGTH + VALUE_LENGTH;
-  private static JobContext lastContext = null;
+  private static MRJobConfig lastContext = null;
   private static List<InputSplit> lastResult = null;
 
   static class TeraFileSplit extends FileSplit {

Modified: hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java (original)
+++ hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java Fri Apr 30 22:26:19 2010
@@ -34,6 +34,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Partitioner;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.Tool;
@@ -153,7 +154,7 @@ public class TeraSort extends Configured
      */
     private static Text[] readPartitions(FileSystem fs, Path p,
         Configuration conf) throws IOException {
-      int reduces = conf.getInt(JobContext.NUM_REDUCES, 1);
+      int reduces = conf.getInt(MRJobConfig.NUM_REDUCES, 1);
       Text[] result = new Text[reduces - 1];
       DataInputStream reader = fs.open(p);
       for(int i=0; i < reduces - 1; ++i) {
@@ -242,7 +243,7 @@ public class TeraSort extends Configured
     public void setConf(Configuration conf) {
       this.conf = conf;
       prefixesPerReduce = (int) Math.ceil((1 << (8 * PREFIX_LENGTH)) / 
-        (float) conf.getInt(JobContext.NUM_REDUCES, 1));
+        (float) conf.getInt(MRJobConfig.NUM_REDUCES, 1));
     }
     
     public Configuration getConf() {

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/AdminOperationsProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/AdminOperationsProtocol.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/AdminOperationsProtocol.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/AdminOperationsProtocol.java Fri Apr 30 22:26:19 2010
@@ -21,14 +21,14 @@ package org.apache.hadoop.mapred;
 import java.io.IOException;
 
 import org.apache.hadoop.ipc.VersionedProtocol;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.security.KerberosInfo;
 
 /**
  * Protocol for admin operations. This is a framework-public interface and is
  * NOT_TO_BE_USED_BY_USERS_DIRECTLY.
  */
-@KerberosInfo(JobContext.JOB_JOBTRACKER_ID)
+@KerberosInfo(MRJobConfig.JOB_JOBTRACKER_ID)
 public interface AdminOperationsProtocol extends VersionedProtocol {
   
   /**

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/InterTrackerProtocol.java Fri Apr 30 22:26:19 2010
@@ -21,14 +21,14 @@ package org.apache.hadoop.mapred;
 import java.io.IOException;
 
 import org.apache.hadoop.ipc.VersionedProtocol;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.security.KerberosInfo;
 
 /** 
  * Protocol that a TaskTracker and the central JobTracker use to communicate.
  * The JobTracker is the Server, which implements this protocol.
  */ 
-@KerberosInfo(JobContext.JOB_JOBTRACKER_ID)
+@KerberosInfo(MRJobConfig.JOB_JOBTRACKER_ID)
 interface InterTrackerProtocol extends VersionedProtocol {
   /**
    * version 3 introduced to replace 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Fri Apr 30 22:26:19 2010
@@ -50,6 +50,7 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobSubmissionFiles;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
@@ -344,13 +345,13 @@ public class JobInProgress {
     (numMapTasks + numReduceTasks + 10);
     
     this.slowTaskThreshold = Math.max(0.0f,
-        conf.getFloat(JobContext.SPECULATIVE_SLOWTASK_THRESHOLD,1.0f));
+        conf.getFloat(MRJobConfig.SPECULATIVE_SLOWTASK_THRESHOLD,1.0f));
     this.speculativeCap = conf.getFloat(
-        JobContext.SPECULATIVECAP,0.1f);
+        MRJobConfig.SPECULATIVECAP,0.1f);
     this.slowNodeThreshold = conf.getFloat(
-        JobContext.SPECULATIVE_SLOWNODE_THRESHOLD,1.0f);
+        MRJobConfig.SPECULATIVE_SLOWNODE_THRESHOLD,1.0f);
     this.jobSetupCleanupNeeded = conf.getBoolean(
-        JobContext.SETUP_CLEANUP_NEEDED, true);
+        MRJobConfig.SETUP_CLEANUP_NEEDED, true);
     if (tracker != null) { // Some mock tests have null tracker
       this.jobHistory = tracker.getJobHistory();
     }
@@ -450,11 +451,11 @@ public class JobInProgress {
     this.nonRunningReduces = new LinkedList<TaskInProgress>();    
     this.runningReduces = new LinkedHashSet<TaskInProgress>();
     this.slowTaskThreshold = Math.max(0.0f,
-        conf.getFloat(JobContext.SPECULATIVE_SLOWTASK_THRESHOLD,1.0f));
+        conf.getFloat(MRJobConfig.SPECULATIVE_SLOWTASK_THRESHOLD,1.0f));
     this.speculativeCap = conf.getFloat(
-        JobContext.SPECULATIVECAP,0.1f);
+        MRJobConfig.SPECULATIVECAP,0.1f);
     this.slowNodeThreshold = conf.getFloat(
-        JobContext.SPECULATIVE_SLOWNODE_THRESHOLD,1.0f); 
+        MRJobConfig.SPECULATIVE_SLOWNODE_THRESHOLD,1.0f); 
     // register job's tokens for renewal
     DelegationTokenRenewal.registerDelegationTokensForRenewal(
         jobInfo.getJobID(), ts, this.conf);
@@ -616,7 +617,7 @@ public class JobInProgress {
     // we should start scheduling reduces
     completedMapsForReduceSlowstart = 
       (int)Math.ceil(
-          (conf.getFloat(JobContext.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, 
+          (conf.getFloat(MRJobConfig.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, 
                          DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART) * 
            numMapTasks));
     
@@ -3266,7 +3267,7 @@ public class JobInProgress {
 
      }
      // remove jobs delegation tokens
-     if(conf.getBoolean(JobContext.JOB_CANCEL_DELEGATION_TOKEN, true)) {
+     if(conf.getBoolean(MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN, true)) {
        DelegationTokenRenewal.removeDelegationTokenRenewalForJob(jobId);
      } // else don't remove it.May be used by spawned tasks
    }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTask.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTask.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTask.java Fri Apr 30 22:26:19 2010
@@ -64,6 +64,7 @@ import org.apache.hadoop.mapreduce.split
 import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.IndexedSortable;
@@ -522,7 +523,7 @@ class MapTask extends Task {
     private final Counters.Counter mapOutputRecordCounter;
     
     @SuppressWarnings("unchecked")
-    NewDirectOutputCollector(org.apache.hadoop.mapreduce.JobContext jobContext,
+    NewDirectOutputCollector(MRJobConfig jobContext,
         JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter) 
     throws IOException, ClassNotFoundException, InterruptedException {
       this.reporter = reporter;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Fri Apr 30 22:26:19 2010
@@ -35,8 +35,8 @@ import java.util.Vector;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.filecache.TaskDistributedCacheManager;
 import org.apache.hadoop.mapreduce.filecache.TrackerDistributedCacheManager;
@@ -312,13 +312,13 @@ abstract class TaskRunner extends Thread
     Configuration aclConf = new Configuration(false);
 
     // set the job view acls in aclConf
-    String jobViewACLs = conf.get(JobContext.JOB_ACL_VIEW_JOB);
+    String jobViewACLs = conf.get(MRJobConfig.JOB_ACL_VIEW_JOB);
     if (jobViewACLs != null) {
-      aclConf.set(JobContext.JOB_ACL_VIEW_JOB, jobViewACLs);
+      aclConf.set(MRJobConfig.JOB_ACL_VIEW_JOB, jobViewACLs);
     }
     // set jobOwner as mapreduce.job.user.name in aclConf
     String jobOwner = conf.getUser();
-    aclConf.set(JobContext.USER_NAME, jobOwner);
+    aclConf.set(MRJobConfig.USER_NAME, jobOwner);
     FileOutputStream out = new FileOutputStream(aclFile);
     try {
       aclConf.writeXml(out);
@@ -492,7 +492,7 @@ abstract class TaskRunner extends Thread
       throws IOException {
 
     // add java.io.tmpdir given by mapreduce.task.tmp.dir
-    String tmp = conf.get(JobContext.TASK_TEMP_DIR, "./tmp");
+    String tmp = conf.get(MRJobConfig.TASK_TEMP_DIR, "./tmp");
     Path tmpDir = new Path(tmp);
 
     // if temp directory path is not absolute, prepend it with workDir.

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Fri Apr 30 22:26:19 2010
@@ -75,7 +75,7 @@ import org.apache.hadoop.mapred.TaskCont
 import org.apache.hadoop.mapred.TaskTrackerStatus.TaskTrackerHealthStatus;
 import org.apache.hadoop.mapred.pipes.Submitter;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.filecache.TrackerDistributedCacheManager;
 import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
@@ -233,7 +233,7 @@ public class TaskTracker 
   static final String JOBFILE = "job.xml";
   static final String JOB_TOKEN_FILE="jobToken"; //localized file
 
-  static final String JOB_LOCAL_DIR = JobContext.JOB_LOCAL_DIR;
+  static final String JOB_LOCAL_DIR = MRJobConfig.JOB_LOCAL_DIR;
 
   private JobConf fConf;
   private FileSystem localFs;
@@ -2261,8 +2261,8 @@ public class TaskTracker 
     }
     // Obtain physical memory limits from the job configuration
     long physicalMemoryLimit =
-      conf.getLong(isMap ? JobContext.MAP_MEMORY_PHYSICAL_MB :
-                   JobContext.REDUCE_MEMORY_PHYSICAL_MB,
+      conf.getLong(isMap ? MRJobConfig.MAP_MEMORY_PHYSICAL_MB :
+                   MRJobConfig.REDUCE_MEMORY_PHYSICAL_MB,
                    JobConf.DISABLED_MEMORY_LIMIT);
     if (physicalMemoryLimit > 0) {
       physicalMemoryLimit *= 1024L * 1024L;
@@ -2482,7 +2482,7 @@ public class TaskTracker 
     public synchronized void setJobConf(JobConf lconf){
       this.localJobConf = lconf;
       keepFailedTaskFiles = localJobConf.getKeepFailedTaskFiles();
-      taskTimeout = localJobConf.getLong(JobContext.TASK_TIMEOUT, 
+      taskTimeout = localJobConf.getLong(MRJobConfig.TASK_TIMEOUT, 
                                          10 * 60 * 1000);
     }
         
@@ -2805,7 +2805,7 @@ public class TaskTracker 
         getTaskController().runDebugScript(context);
         // add all lines of debug out to diagnostics
         try {
-          int num = localJobConf.getInt(JobContext.TASK_DEBUGOUT_LINES,
+          int num = localJobConf.getInt(MRJobConfig.TASK_DEBUGOUT_LINES,
               -1);
           addDiagnostics(FileUtil.makeShellPath(stdout),num,
               "DEBUG OUT");

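The memory-limit hunk above selects the map- or reduce-side key and converts megabytes to bytes. A minimal sketch of the same lookup against a plain Configuration, with -1L standing in for JobConf.DISABLED_MEMORY_LIMIT (the class and method names are illustrative):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapreduce.MRJobConfig;

  public class MemoryLimitSketch {
    // Mirrors the TaskTracker logic: returns the limit in bytes,
    // or -1 when no physical-memory limit is configured.
    static long physicalMemoryLimitBytes(Configuration conf, boolean isMap) {
      long limitMb = conf.getLong(
          isMap ? MRJobConfig.MAP_MEMORY_PHYSICAL_MB
                : MRJobConfig.REDUCE_MEMORY_PHYSICAL_MB,
          -1L); // stand-in for JobConf.DISABLED_MEMORY_LIMIT
      return limitMb > 0 ? limitMb * 1024L * 1024L : -1L;
    }

    public static void main(String[] args) {
      Configuration conf = new Configuration();
      conf.setLong(MRJobConfig.MAP_MEMORY_PHYSICAL_MB, 512);
      System.out.println(physicalMemoryLimitBytes(conf, true)); // 536870912
    }
  }
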
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/UserLogCleaner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/UserLogCleaner.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/UserLogCleaner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/UserLogCleaner.java Fri Apr 30 22:26:19 2010
@@ -32,7 +32,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
 
@@ -149,7 +149,7 @@ class UserLogCleaner extends Thread {
 
   private int getUserlogRetainMillis(Configuration conf) {
     return (conf == null ? UserLogCleaner.DEFAULT_USER_LOG_RETAIN_HOURS
-        : conf.getInt(JobContext.USER_LOG_RETAIN_HOURS,
+        : conf.getInt(MRJobConfig.USER_LOG_RETAIN_HOURS,
             UserLogCleaner.DEFAULT_USER_LOG_RETAIN_HOURS)) * 1000 * 60 * 60;
   }
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultipleOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultipleOutputFormat.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultipleOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultipleOutputFormat.java Fri Apr 30 22:26:19 2010
@@ -29,6 +29,7 @@ import org.apache.hadoop.mapred.FileOutp
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.util.Progressable;
 
 /**
@@ -189,7 +190,7 @@ extends FileOutputFormat<K, V> {
    * @return the output file name based on the given name and the input file name.
    */
   protected String getInputFileBasedOutputFileName(JobConf job, String name) {
-    String infilepath = job.get(JobContext.MAP_INPUT_FILE);
+    String infilepath = job.get(MRJobConfig.MAP_INPUT_FILE);
     if (infilepath == null) {
      // if the {@link JobContext#MAP_INPUT_FILE} does not exist,
       // then return the given name

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/db/DBOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/db/DBOutputFormat.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/db/DBOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/db/DBOutputFormat.java Fri Apr 30 22:26:19 2010
@@ -28,7 +28,7 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
@@ -71,7 +71,7 @@ public class DBOutputFormat<K  extends D
       JobConf job, String name, Progressable progress) throws IOException {
     org.apache.hadoop.mapreduce.RecordWriter<K, V> w = super.getRecordWriter(
       new TaskAttemptContextImpl(job, 
-            TaskAttemptID.forName(job.get(JobContext.TASK_ATTEMPT_ID))));
+            TaskAttemptID.forName(job.get(MRJobConfig.TASK_ATTEMPT_ID))));
     org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter writer = 
      (org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter) w;
     try {

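The DBOutputFormat bridge above reconstructs the running task's attempt id from the configuration. A minimal sketch of that lookup, using a fabricated attempt-id string; at task runtime the framework sets this key itself:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapreduce.MRJobConfig;
  import org.apache.hadoop.mapreduce.TaskAttemptID;

  public class TaskAttemptIdSketch {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Fabricated value; the framework normally sets this key per task.
      conf.set(MRJobConfig.TASK_ATTEMPT_ID,
               "attempt_200704121426_0001_m_000001_0");
      TaskAttemptID id =
          TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID));
      System.out.println(id.getJobID());  // job_200704121426_0001
      System.out.println(id.getTaskID()); // task_200704121426_0001_m_000001
    }
  }
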
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Application.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Application.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Application.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Application.java Fri Apr 30 22:26:19 2010
@@ -29,7 +29,7 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.FloatWritable;
@@ -98,7 +98,7 @@ class Application<K1 extends WritableCom
     cmd.add(executable);
     // wrap the command in a stdout/stderr capture
     TaskAttemptID taskid = 
-      TaskAttemptID.forName(conf.get(JobContext.TASK_ATTEMPT_ID));
+      TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID));
     File stdout = TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDOUT);
     File stderr = TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDERR);
     long logLength = TaskLog.getTaskLogLength(conf);

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesMapRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesMapRunner.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesMapRunner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesMapRunner.java Fri Apr 30 22:26:19 2010
@@ -30,7 +30,7 @@ import org.apache.hadoop.mapred.OutputCo
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SkipBadRecords;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 /**
  * An adaptor to run a C++ mapper.
@@ -77,7 +77,7 @@ class PipesMapRunner<K1 extends Writable
     boolean isJavaInput = Submitter.getIsJavaRecordReader(job);
     downlink.runMap(reporter.getInputSplit(), 
                     job.getNumReduceTasks(), isJavaInput);
-    boolean skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
+    boolean skipping = job.getBoolean(MRJobConfig.SKIP_RECORDS, false);
     try {
       if (isJavaInput) {
         // allocate key & value instances that are re-used for all entries

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesReducer.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesReducer.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesReducer.java Fri Apr 30 22:26:19 2010
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapred.OutputCo
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SkipBadRecords;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -50,7 +50,7 @@ class PipesReducer<K2 extends WritableCo
     // disable the auto-increment of the counter. For pipes, the number of
     // processed records may be equal to or less than the number of input records.
     SkipBadRecords.setAutoIncrReducerProcCount(job, false);
-    skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
+    skipping = job.getBoolean(MRJobConfig.SKIP_RECORDS, false);
   }
 
   /**

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java Fri Apr 30 22:26:19 2010
@@ -39,7 +39,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -290,10 +290,10 @@ public class Submitter extends Configure
       }
     }
     String textClassname = Text.class.getName();
-    setIfUnset(conf, JobContext.MAP_OUTPUT_KEY_CLASS, textClassname);
-    setIfUnset(conf, JobContext.MAP_OUTPUT_VALUE_CLASS, textClassname);
-    setIfUnset(conf, JobContext.OUTPUT_KEY_CLASS, textClassname);
-    setIfUnset(conf, JobContext.OUTPUT_VALUE_CLASS, textClassname);
+    setIfUnset(conf, MRJobConfig.MAP_OUTPUT_KEY_CLASS, textClassname);
+    setIfUnset(conf, MRJobConfig.MAP_OUTPUT_VALUE_CLASS, textClassname);
+    setIfUnset(conf, MRJobConfig.OUTPUT_KEY_CLASS, textClassname);
+    setIfUnset(conf, MRJobConfig.OUTPUT_VALUE_CLASS, textClassname);
     
     // Use PipesNonJavaInputFormat if necessary to handle progress reporting
     // from C++ RecordReaders ...
@@ -313,8 +313,8 @@ public class Submitter extends Configure
       DistributedCache.createSymlink(conf);
      // set default gdb commands for map and reduce tasks
       String defScript = "$HADOOP_HOME/src/c++/pipes/debug/pipes-default-script";
-      setIfUnset(conf, JobContext.MAP_DEBUG_SCRIPT,defScript);
-      setIfUnset(conf, JobContext.REDUCE_DEBUG_SCRIPT,defScript);
+      setIfUnset(conf, MRJobConfig.MAP_DEBUG_SCRIPT,defScript);
+      setIfUnset(conf, MRJobConfig.REDUCE_DEBUG_SCRIPT,defScript);
     }
     URI[] fileCache = DistributedCache.getCacheFiles(conf);
     if (fileCache == null) {

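The Submitter hunks above lean on a set-if-unset pattern: user-supplied values win, and the pipes defaults are filled in only when a key is absent. A minimal stand-alone sketch of that pattern (SketchDefaults and its helper are illustrative, not the patch's own code):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.mapreduce.MRJobConfig;

  public class SketchDefaults {
    // Only sets the key when the configuration does not already define it.
    static void setIfUnset(Configuration conf, String key, String value) {
      if (conf.get(key) == null) {
        conf.set(key, value);
      }
    }

    public static void main(String[] args) {
      Configuration conf = new Configuration();
      conf.set(MRJobConfig.OUTPUT_KEY_CLASS, "org.apache.hadoop.io.LongWritable");

      String text = Text.class.getName();
      setIfUnset(conf, MRJobConfig.MAP_OUTPUT_KEY_CLASS, text); // filled in
      setIfUnset(conf, MRJobConfig.OUTPUT_KEY_CLASS, text);     // left alone

      System.out.println(conf.get(MRJobConfig.MAP_OUTPUT_KEY_CLASS)); // ...io.Text
      System.out.println(conf.get(MRJobConfig.OUTPUT_KEY_CLASS));     // ...io.LongWritable
    }
  }
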
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobACL.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobACL.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobACL.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobACL.java Fri Apr 30 22:26:19 2010
@@ -29,14 +29,14 @@ public enum JobACL {
    * ACL for 'viewing' the job. Dictates who can 'view' some or all of the
    * job-related details.
    */
-  VIEW_JOB(JobContext.JOB_ACL_VIEW_JOB),
+  VIEW_JOB(MRJobConfig.JOB_ACL_VIEW_JOB),
 
   /**
    * ACL for 'modifying' the job. Dictates who can 'modify' the job, e.g., by
    * killing the job, killing/failing a task of the job, or setting the
    * priority of the job.
    */
-  MODIFY_JOB(JobContext.JOB_ACL_MODIFY_JOB);
+  MODIFY_JOB(MRJobConfig.JOB_ACL_MODIFY_JOB);
 
   String aclName;
 

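With the enum now constructed from MRJobConfig keys, the mapping can be checked directly. A minimal sketch, assuming the enum's getAclName() accessor returns the backing key:

  import org.apache.hadoop.mapreduce.JobACL;
  import org.apache.hadoop.mapreduce.MRJobConfig;

  public class JobAclSketch {
    public static void main(String[] args) {
      // Both constants should now be backed by MRJobConfig keys.
      System.out.println(JobACL.VIEW_JOB.getAclName()
          .equals(MRJobConfig.JOB_ACL_VIEW_JOB));   // true
      System.out.println(JobACL.MODIFY_JOB.getAclName()
          .equals(MRJobConfig.JOB_ACL_MODIFY_JOB)); // true
    }
  }
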
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobContext.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobContext.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobContext.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobContext.java Fri Apr 30 22:26:19 2010
@@ -35,215 +35,7 @@ import org.apache.hadoop.mapreduce.Mappe
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public interface JobContext {
-  // Put all of the attribute names in here so that Job and JobContext are
-  // consistent.
-  public static final String INPUT_FORMAT_CLASS_ATTR = 
-    "mapreduce.job.inputformat.class";
-  public static final String MAP_CLASS_ATTR = "mapreduce.job.map.class";
-  public static final String COMBINE_CLASS_ATTR = 
-    "mapreduce.job.combine.class";
-  public static final String REDUCE_CLASS_ATTR = 
-    "mapreduce.job.reduce.class";
-  public static final String OUTPUT_FORMAT_CLASS_ATTR = 
-    "mapreduce.job.outputformat.class";
-  public static final String PARTITIONER_CLASS_ATTR = 
-    "mapreduce.job.partitioner.class";
-
-  public static final String SETUP_CLEANUP_NEEDED = 
-    "mapreduce.job.committer.setup.cleanup.needed";
-  public static final String JAR = "mapreduce.job.jar";
-  public static final String ID = "mapreduce.job.id";
-  public static final String JOB_NAME = "mapreduce.job.name";
-  public static final String JAR_UNPACK_PATTERN = "mapreduce.job.jar.unpack.pattern";
-  public static final String USER_NAME = "mapreduce.job.user.name";
-  public static final String PRIORITY = "mapreduce.job.priority";
-  public static final String QUEUE_NAME = "mapreduce.job.queuename";
-  public static final String JVM_NUMTASKS_TORUN = 
-    "mapreduce.job.jvm.numtasks";
-  public static final String SPLIT_FILE = "mapreduce.job.splitfile";
-  public static final String NUM_MAPS = "mapreduce.job.maps";
-  public static final String MAX_TASK_FAILURES_PER_TRACKER = 
-    "mapreduce.job.maxtaskfailures.per.tracker";
-  public static final String COMPLETED_MAPS_FOR_REDUCE_SLOWSTART =
-    "mapreduce.job.reduce.slowstart.completedmaps";
-  public static final String NUM_REDUCES = "mapreduce.job.reduces";
-  public static final String SKIP_RECORDS = "mapreduce.job.skiprecords";
-  public static final String SKIP_OUTDIR = "mapreduce.job.skip.outdir";
-  public static final String SPECULATIVE_SLOWNODE_THRESHOLD =
-    "mapreduce.job.speculative.slownodethreshold";
-  public static final String SPECULATIVE_SLOWTASK_THRESHOLD = 
-    "mapreduce.job.speculative.slowtaskthreshold";
-  public static final String SPECULATIVECAP = 
-    "mapreduce.job.speculative.speculativecap";
-  public static final String JOB_LOCAL_DIR = "mapreduce.job.local.dir";
-  public static final String OUTPUT_KEY_CLASS = 
-    "mapreduce.job.output.key.class";
-  public static final String OUTPUT_VALUE_CLASS = 
-    "mapreduce.job.output.value.class";
-  public static final String KEY_COMPARATOR = 
-    "mapreduce.job.output.key.comparator.class";
-  public static final String GROUP_COMPARATOR_CLASS = 
-    "mapreduce.job.output.group.comparator.class";
-  public static final String WORKING_DIR = "mapreduce.job.working.dir";
-  public static final String HISTORY_LOCATION = 
-    "mapreduce.job.userhistorylocation"; 
-  public static final String END_NOTIFICATION_URL = 
-    "mapreduce.job.end-notification.url";
-  public static final String END_NOTIFICATION_RETRIES = 
-    "mapreduce.job.end-notification.retry.attempts";
-  public static final String END_NOTIFICATION_RETRIE_INTERVAL = 
-    "mapreduce.job.end-notification.retry.interval";
-  public static final String CLASSPATH_ARCHIVES = 
-    "mapreduce.job.classpath.archives";
-  public static final String CLASSPATH_FILES = "mapreduce.job.classpath.files";
-  public static final String CACHE_FILES = "mapreduce.job.cache.files";
-  public static final String CACHE_ARCHIVES = "mapreduce.job.cache.archives";
-  public static final String CACHE_FILES_SIZES =
-    "mapreduce.job.cache.files.filesizes";    // internal use only
-  public static final String CACHE_ARCHIVES_SIZES =
-    "mapreduce.job.cache.archives.filesizes"; // ditto
-  public static final String CACHE_LOCALFILES =
-    "mapreduce.job.cache.local.files";
-  public static final String CACHE_LOCALARCHIVES = 
-    "mapreduce.job.cache.local.archives";
-  public static final String CACHE_FILE_TIMESTAMPS = 
-    "mapreduce.job.cache.files.timestamps";
-  public static final String CACHE_ARCHIVES_TIMESTAMPS = 
-    "mapreduce.job.cache.archives.timestamps";
-  public static final String CACHE_FILE_VISIBILITIES = 
-    "mapreduce.job.cache.files.visibilities";
-  public static final String CACHE_ARCHIVES_VISIBILITIES = 
-    "mapreduce.job.cache.archives.visibilities";
-  public static final String CACHE_SYMLINK = 
-    "mapreduce.job.cache.symlink.create";
-  public static final String USER_LOG_RETAIN_HOURS = 
-    "mapreduce.job.userlog.retain.hours";
-  
-  public static final String IO_SORT_FACTOR = 
-    "mapreduce.task.io.sort.factor"; 
-  public static final String IO_SORT_MB = "mapreduce.task.io.sort.mb";
-  public static final String PRESERVE_FAILED_TASK_FILES = 
-    "mapreduce.task.files.preserve.failedtasks";
-  public static final String PRESERVE_FILES_PATTERN = 
-    "mapreduce.task.files.preserve.filepattern";
-  public static final String TASK_TEMP_DIR = "mapreduce.task.tmp.dir";
-  public static final String TASK_DEBUGOUT_LINES = 
-    "mapreduce.task.debugout.lines";
-  public static final String RECORDS_BEFORE_PROGRESS = 
-    "mapreduce.task.merge.progress.records";
-  public static final String SKIP_START_ATTEMPTS = 
-    "mapreduce.task.skip.start.attempts";
-  public static final String TASK_ATTEMPT_ID = "mapreduce.task.attempt.id";
-  public static final String TASK_ISMAP = "mapreduce.task.ismap";
-  public static final String TASK_PARTITION = "mapreduce.task.partition";
-  public static final String TASK_PROFILE = "mapreduce.task.profile";
-  public static final String TASK_PROFILE_PARAMS = 
-    "mapreduce.task.profile.params";
-  public static final String NUM_MAP_PROFILES = 
-    "mapreduce.task.profile.maps";
-  public static final String NUM_REDUCE_PROFILES = 
-    "mapreduce.task.profile.reduces";
-  public static final String TASK_TIMEOUT = "mapreduce.task.timeout";
-  public static final String TASK_ID = "mapreduce.task.id";
-  public static final String TASK_OUTPUT_DIR = "mapreduce.task.output.dir";
-  public static final String TASK_USERLOG_LIMIT = 
-    "mapreduce.task.userlog.limit.kb";
-  
-  public static final String MAP_SORT_SPILL_PERCENT =
-    "mapreduce.map.sort.spill.percent";
-  public static final String MAP_INPUT_FILE = "mapreduce.map.input.file";
-  public static final String MAP_INPUT_PATH = "mapreduce.map.input.length";
-  public static final String MAP_INPUT_START = "mapreduce.map.input.start";
-  public static final String MAP_MEMORY_MB = "mapreduce.map.memory.mb";
-  public static final String MAP_MEMORY_PHYSICAL_MB =
-    "mapreduce.map.memory.physical.mb";
-  public static final String MAP_ENV = "mapreduce.map.env";
-  public static final String MAP_JAVA_OPTS = "mapreduce.map.java.opts";
-  public static final String MAP_ULIMIT = "mapreduce.map.ulimit"; 
-  public static final String MAP_MAX_ATTEMPTS = "mapreduce.map.maxattempts";
-  public static final String MAP_DEBUG_SCRIPT = 
-    "mapreduce.map.debug.script";
-  public static final String MAP_SPECULATIVE = "mapreduce.map.speculative";
-  public static final String MAP_FAILURES_MAX_PERCENT = 
-    "mapreduce.map.failures.maxpercent";
-  public static final String MAP_SKIP_INCR_PROC_COUNT = 
-    "mapreduce.map.skip.proc-count.auto-incr";
-  public static final String MAP_SKIP_MAX_RECORDS = 
-    "mapreduce.map.skip.maxrecords";
-  public static final String MAP_COMBINE_MIN_SPILLS = 
-    "mapreduce.map.combine.minspills";
-  public static final String MAP_OUTPUT_COMPRESS = 
-    "mapreduce.map.output.compress";
-  public static final String MAP_OUTPUT_COMPRESS_CODEC = 
-    "mapreduce.map.output.compress.codec";
-  public static final String MAP_OUTPUT_KEY_CLASS = 
-    "mapreduce.map.output.key.class";
-  public static final String MAP_OUTPUT_VALUE_CLASS = 
-    "mapreduce.map.output.value.class";
-  public static final String MAP_OUTPUT_KEY_FIELD_SEPERATOR = 
-    "mapreduce.map.output.key.field.separator";
-  public static final String MAP_LOG_LEVEL = "mapreduce.map.log.level";
- 
-  public static final String REDUCE_LOG_LEVEL = 
-    "mapreduce.reduce.log.level";
-  public static final String REDUCE_MERGE_INMEM_THRESHOLD = 
-    "mapreduce.reduce.merge.inmem.threshold";
-  public static final String REDUCE_INPUT_BUFFER_PERCENT = 
-    "mapreduce.reduce.input.buffer.percent";
-  public static final String REDUCE_MARKRESET_BUFFER_PERCENT = 
-    "mapreduce.reduce.markreset.buffer.percent";
-  public static final String REDUCE_MARKRESET_BUFFER_SIZE = 
-    "mapreduce.reduce.markreset.buffer.size";
-  public static final String REDUCE_MEMORY_PHYSICAL_MB = 
-    "mapreduce.reduce.memory.physical.mb";
-  public static final String REDUCE_MEMORY_MB = 
-    "mapreduce.reduce.memory.mb";
-  public static final String REDUCE_MEMORY_TOTAL_BYTES = 
-    "mapreduce.reduce.memory.totalbytes";
-  public static final String SHUFFLE_INPUT_BUFFER_PERCENT = 
-    "mapreduce.reduce.shuffle.input.buffer.percent";
-  public static final String SHUFFLE_MERGE_EPRCENT = 
-    "mapreduce.reduce.shuffle.merge.percent";
-  public static final String REDUCE_FAILURES_MAXPERCENT = 
-   "mapreduce.reduce.failures.maxpercent";
-  public static final String REDUCE_ENV = "mapreduce.reduce.env";
-  public static final String REDUCE_JAVA_OPTS = 
-    "mapreduce.reduce.java.opts";
-  public static final String REDUCE_ULIMIT = "mapreduce.reduce.ulimit"; 
-  public static final String REDUCE_MAX_ATTEMPTS = 
-    "mapreduce.reduce.maxattempts";
-  public static final String SHUFFLE_PARALLEL_COPIES = 
-    "mapreduce.reduce.shuffle.parallelcopies";
-  public static final String REDUCE_DEBUG_SCRIPT = 
-    "mapreduce.reduce.debug.script";
-  public static final String REDUCE_SPECULATIVE = 
-    "mapreduce.reduce.speculative";
-  public static final String SHUFFLE_CONNECT_TIMEOUT = 
-    "mapreduce.reduce.shuffle.connect.timeout";
-  public static final String SHUFFLE_READ_TIMEOUT = 
-    "mapreduce.reduce.shuffle.read.timeout";
-  public static final String SHUFFLE_FETCH_FAILURES = 
-    "mapreduce.reduce.shuffle.maxfetchfailures";
-  public static final String SHUFFLE_NOTIFY_READERROR = 
-    "mapreduce.reduce.shuffle.notify.readerror";
-  public static final String REDUCE_SKIP_INCR_PROC_COUNT = 
-    "mapreduce.reduce.skip.proc-count.auto-incr";
-  public static final String REDUCE_SKIP_MAXGROUPS = 
-    "mapreduce.reduce.skip.maxgroups";
-  public static final String REDUCE_MEMTOMEM_THRESHOLD = 
-    "mapreduce.reduce.merge.memtomem.threshold";
-  public static final String REDUCE_MEMTOMEM_ENABLED = 
-    "mapreduce.reduce.merge.memtomem.enabled";
-  public static final String JOB_NAMENODES = "mapreduce.job.hdfs-servers";
-  public static final String JOB_JOBTRACKER_ID = "mapreduce.job.kerberos.jtprinicipal";
-  public static final String JOB_CANCEL_DELEGATION_TOKEN = 
-    "mapreduce.job.complete.cancel.delegation.tokens";
-  public static final String JOB_ACL_VIEW_JOB =
-      "mapreduce.job.acl-view-job";
-  public static final String JOB_ACL_MODIFY_JOB =
-      "mapreduce.job.acl-modify-job";
-
+public interface JobContext extends MRJobConfig {
   /**
    * Return the configuration for the job.
    * @return the shared configuration object

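Because the interface now extends MRJobConfig, the constants are inherited rather than removed: existing references through JobContext keep compiling and resolve to the same fields. A minimal sketch of that source compatibility:

  import org.apache.hadoop.mapreduce.JobContext;
  import org.apache.hadoop.mapreduce.MRJobConfig;

  public class ConstantCompatSketch {
    public static void main(String[] args) {
      // The inherited constant is the very same field, so even reference
      // equality holds; old call sites need no source changes.
      System.out.println(JobContext.TASK_TIMEOUT); // mapreduce.task.timeout
      System.out.println(JobContext.TASK_TIMEOUT == MRJobConfig.TASK_TIMEOUT); // true
    }
  }
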
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java Fri Apr 30 22:26:19 2010
@@ -497,7 +497,7 @@ class JobSubmitter {
     }
     
     // add the delegation tokens from configuration
-    String [] nameNodes = conf.getStrings(JobContext.JOB_NAMENODES);
+    String [] nameNodes = conf.getStrings(MRJobConfig.JOB_NAMENODES);
     LOG.info("adding the following namenodes' delegation tokens:" + Arrays.toString(nameNodes));
     if(nameNodes != null) {
       Path [] ps = new Path[nameNodes.length];

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java Fri Apr 30 22:26:19 2010
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.DefaultTaskController;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 import java.net.URI;
 
@@ -340,7 +341,7 @@ public class DistributedCache {
   @Deprecated
   public static void setCacheArchives(URI[] archives, Configuration conf) {
     String sarchives = StringUtils.uriToString(archives);
-    conf.set(JobContext.CACHE_ARCHIVES, sarchives);
+    conf.set(MRJobConfig.CACHE_ARCHIVES, sarchives);
   }
 
   /**
@@ -353,7 +354,7 @@ public class DistributedCache {
   @Deprecated
   public static void setCacheFiles(URI[] files, Configuration conf) {
     String sfiles = StringUtils.uriToString(files);
-    conf.set(JobContext.CACHE_FILES, sfiles);
+    conf.set(MRJobConfig.CACHE_FILES, sfiles);
   }
 
   /**
@@ -366,7 +367,7 @@ public class DistributedCache {
    */
   @Deprecated
   public static URI[] getCacheArchives(Configuration conf) throws IOException {
-    return StringUtils.stringToURI(conf.getStrings(JobContext.CACHE_ARCHIVES));
+    return StringUtils.stringToURI(conf.getStrings(MRJobConfig.CACHE_ARCHIVES));
   }
 
   /**
@@ -379,7 +380,7 @@ public class DistributedCache {
    */
   @Deprecated
   public static URI[] getCacheFiles(Configuration conf) throws IOException {
-    return StringUtils.stringToURI(conf.getStrings(JobContext.CACHE_FILES));
+    return StringUtils.stringToURI(conf.getStrings(MRJobConfig.CACHE_FILES));
   }
 
   /**
@@ -394,7 +395,7 @@ public class DistributedCache {
   public static Path[] getLocalCacheArchives(Configuration conf)
     throws IOException {
     return StringUtils.stringToPath(conf
-                                    .getStrings(JobContext.CACHE_LOCALARCHIVES));
+                                    .getStrings(MRJobConfig.CACHE_LOCALARCHIVES));
   }
 
   /**
@@ -408,7 +409,7 @@ public class DistributedCache {
   @Deprecated
   public static Path[] getLocalCacheFiles(Configuration conf)
     throws IOException {
-    return StringUtils.stringToPath(conf.getStrings(JobContext.CACHE_LOCALFILES));
+    return StringUtils.stringToPath(conf.getStrings(MRJobConfig.CACHE_LOCALFILES));
   }
 
   /**
@@ -421,7 +422,7 @@ public class DistributedCache {
    */
   @Deprecated
   public static String[] getArchiveTimestamps(Configuration conf) {
-    return conf.getStrings(JobContext.CACHE_ARCHIVES_TIMESTAMPS);
+    return conf.getStrings(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS);
   }
 
 
@@ -435,7 +436,7 @@ public class DistributedCache {
    */
   @Deprecated
   public static String[] getFileTimestamps(Configuration conf) {
-    return conf.getStrings(JobContext.CACHE_FILE_TIMESTAMPS);
+    return conf.getStrings(MRJobConfig.CACHE_FILE_TIMESTAMPS);
   }
 
   /**
@@ -505,8 +506,8 @@ public class DistributedCache {
    */
   @Deprecated
   public static void addCacheArchive(URI uri, Configuration conf) {
-    String archives = conf.get(JobContext.CACHE_ARCHIVES);
-    conf.set(JobContext.CACHE_ARCHIVES, archives == null ? uri.toString()
+    String archives = conf.get(MRJobConfig.CACHE_ARCHIVES);
+    conf.set(MRJobConfig.CACHE_ARCHIVES, archives == null ? uri.toString()
              : archives + "," + uri.toString());
   }
   
@@ -519,8 +520,8 @@ public class DistributedCache {
    */
   @Deprecated
   public static void addCacheFile(URI uri, Configuration conf) {
-    String files = conf.get(JobContext.CACHE_FILES);
-    conf.set(JobContext.CACHE_FILES, files == null ? uri.toString() : files + ","
+    String files = conf.get(MRJobConfig.CACHE_FILES);
+    conf.set(MRJobConfig.CACHE_FILES, files == null ? uri.toString() : files + ","
              + uri.toString());
   }
 
@@ -535,8 +536,8 @@ public class DistributedCache {
   @Deprecated
   public static void addFileToClassPath(Path file, Configuration conf)
     throws IOException {
-    String classpath = conf.get(JobContext.CLASSPATH_FILES);
-    conf.set(JobContext.CLASSPATH_FILES, classpath == null ? file.toString()
+    String classpath = conf.get(MRJobConfig.CLASSPATH_FILES);
+    conf.set(MRJobConfig.CLASSPATH_FILES, classpath == null ? file.toString()
              : classpath + "," + file.toString());
     FileSystem fs = FileSystem.get(conf);
     URI uri = fs.makeQualified(file).toUri();
@@ -554,7 +555,7 @@ public class DistributedCache {
   @Deprecated
   public static Path[] getFileClassPaths(Configuration conf) {
     ArrayList<String> list = (ArrayList<String>)conf.getStringCollection(
-                                JobContext.CLASSPATH_FILES);
+                                MRJobConfig.CLASSPATH_FILES);
     if (list.size() == 0) { 
       return null; 
     }
@@ -576,8 +577,8 @@ public class DistributedCache {
   @Deprecated
   public static void addArchiveToClassPath(Path archive, Configuration conf)
     throws IOException {
-    String classpath = conf.get(JobContext.CLASSPATH_ARCHIVES);
-    conf.set(JobContext.CLASSPATH_ARCHIVES, classpath == null ? archive
+    String classpath = conf.get(MRJobConfig.CLASSPATH_ARCHIVES);
+    conf.set(MRJobConfig.CLASSPATH_ARCHIVES, classpath == null ? archive
              .toString() : classpath + "," + archive.toString());
     FileSystem fs = FileSystem.get(conf);
     URI uri = fs.makeQualified(archive).toUri();
@@ -595,7 +596,7 @@ public class DistributedCache {
   @Deprecated
   public static Path[] getArchiveClassPaths(Configuration conf) {
     ArrayList<String> list = (ArrayList<String>)conf.getStringCollection(
-                                JobContext.CLASSPATH_ARCHIVES);
+                                MRJobConfig.CLASSPATH_ARCHIVES);
     if (list.size() == 0) { 
       return null; 
     }
@@ -615,7 +616,7 @@ public class DistributedCache {
    */
   @Deprecated
   public static void createSymlink(Configuration conf){
-    conf.set(JobContext.CACHE_SYMLINK, "yes");
+    conf.set(MRJobConfig.CACHE_SYMLINK, "yes");
   }
   
   /**
@@ -628,7 +629,7 @@ public class DistributedCache {
    */
   @Deprecated
   public static boolean getSymlink(Configuration conf){
-    String result = conf.get(JobContext.CACHE_SYMLINK);
+    String result = conf.get(MRJobConfig.CACHE_SYMLINK);
     if ("yes".equals(result)){
       return true;
     }