Posted to mapreduce-commits@hadoop.apache.org by ma...@apache.org on 2011/09/18 04:52:45 UTC

svn commit: r1172172 - in /hadoop/common/branches/branch-0.23/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/o...

Author: mahadev
Date: Sun Sep 18 02:52:44 2011
New Revision: 1172172

URL: http://svn.apache.org/viewvc?rev=1172172&view=rev
Log:
MAPREDUCE-1788. o.a.h.mapreduce.Job shouldn't make a copy of the JobConf. (Arun Murthy via mahadev) - Merging r1172171 from trunk
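
For context, a minimal before/after sketch of the caller-side migration the hunks below perform across the client, tools, examples and tests. The class name and job name here are illustrative only and not part of the commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class GetInstanceMigration {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Before this commit, callers built a Cluster up front just to get a Job:
        //   Job job = Job.getInstance(new Cluster(conf), conf);
        // That overload is now deprecated and simply delegates to getInstance(conf).

        // After: create the Job from the Configuration alone. Job copies it into an
        // internal JobConf, and a Cluster is only created lazily when it is needed.
        Job job = Job.getInstance(conf, "migration-example");
        System.out.println(job.getJobName());
      }
    }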

Modified:
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraChecksum.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestClusterStatus.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobCounters.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt Sun Sep 18 02:52:44 2011
@@ -1320,6 +1320,9 @@ Release 0.22.0 - Unreleased
     MAPREDUCE-2994. Fixed a bug in ApplicationID parsing that affects RM
     UI. (Devaraj K via vinodkv)
 
+    MAPREDUCE-1788. o.a.h.mapreduce.Job shouldn't make a copy of the JobConf.
+    (Arun Murthy via mahadev)
+
   NEW FEATURES
 
     MAPREDUCE-1804. Stress-test tool for HDFS introduced in HDFS-708.

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java Sun Sep 18 02:52:44 2011
@@ -535,7 +535,7 @@ public class JobClient extends CLI {
     try {
       conf.setBooleanIfUnset("mapred.mapper.new-api", false);
       conf.setBooleanIfUnset("mapred.reducer.new-api", false);
-      Job job = Job.getInstance(cluster, conf);
+      Job job = Job.getInstance(conf);
       job.submit();
       return new NetworkedJob(job);
     } catch (InterruptedException ie) {

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java Sun Sep 18 02:52:44 2011
@@ -112,7 +112,7 @@ public class Cluster {
   private Job[] getJobs(JobStatus[] stats) throws IOException {
     List<Job> jobs = new ArrayList<Job>();
     for (JobStatus stat : stats) {
-      jobs.add(new Job(this, stat, new JobConf(stat.getJobFile())));
+      jobs.add(Job.getInstance(this, stat, new JobConf(stat.getJobFile())));
     }
     return jobs.toArray(new Job[0]);
   }
@@ -152,7 +152,7 @@ public class Cluster {
   public Job getJob(JobID jobId) throws IOException, InterruptedException {
     JobStatus status = client.getJobStatus(jobId);
     if (status != null) {
-      return new Job(this, status, new JobConf(status.getJobFile()));
+      return Job.getInstance(this, status, new JobConf(status.getJobFile()));
     }
     return null;
   }

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java Sun Sep 18 02:52:44 2011
@@ -31,22 +31,22 @@ import java.net.URLConnection;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
+import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -130,7 +130,7 @@ public class Job extends JobContextImpl 
 
   @Deprecated
   public Job(Configuration conf) throws IOException {
-    this(new Cluster(conf), conf);
+    this(new JobConf(conf));
   }
 
   @Deprecated
@@ -139,18 +139,13 @@ public class Job extends JobContextImpl 
     setJobName(jobName);
   }
 
-  Job(Cluster cluster) throws IOException {
-    this(cluster, new Configuration());
-  }
-
-  Job(Cluster cluster, Configuration conf) throws IOException {
+  Job(JobConf conf) throws IOException {
     super(conf, null);
-    this.cluster = cluster;
+    this.cluster = null;
   }
 
-  Job(Cluster cluster, JobStatus status,
-             Configuration conf) throws IOException {
-    this(cluster, conf);
+  Job(JobStatus status, JobConf conf) throws IOException {
+    this(conf);
     setJobID(status.getJobID());
     this.status = status;
     state = JobState.RUNNING;
@@ -170,7 +165,13 @@ public class Job extends JobContextImpl 
   }
       
   /**
-   * Creates a new {@link Job} with no particular {@link Cluster} .
+   * Creates a new {@link Job} with no particular {@link Cluster} and a 
+   * given {@link Configuration}.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
    * A Cluster will be created from the conf parameter only when it's needed.
    * 
    * @param conf the configuration
@@ -179,13 +180,18 @@ public class Job extends JobContextImpl 
    */
   public static Job getInstance(Configuration conf) throws IOException {
     // create with a null Cluster
-    return new Job(null, conf);
+    JobConf jobConf = new JobConf(conf);
+    return new Job(jobConf);
   }
 
       
   /**
    * Creates a new {@link Job} with no particular {@link Cluster} and a given jobName.
    * A Cluster will be created from the conf parameter only when it's needed.
+   *
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
    * 
    * @param conf the configuration
    * @return the {@link Job} , with no connection to a cluster yet.
@@ -194,25 +200,92 @@ public class Job extends JobContextImpl 
   public static Job getInstance(Configuration conf, String jobName)
            throws IOException {
     // create with a null Cluster
-    Job result = new Job(null, conf);
+    Job result = getInstance(conf);
     result.setJobName(jobName);
     return result;
   }
   
-  public static Job getInstance(Cluster cluster) throws IOException {
-     return new Job(cluster);
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster} and given
+   * {@link Configuration} and {@link JobStatus}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param status job status
+   * @param conf job configuration
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   */
+  public static Job getInstance(JobStatus status, Configuration conf) 
+  throws IOException {
+    return new Job(status, new JobConf(conf));
+  }
+
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   *
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param ignored
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   * @deprecated Use {@link #getInstance()}
+   */
+  @Deprecated
+  public static Job getInstance(Cluster ignored) throws IOException {
+    return getInstance();
   }
   
-  public static Job getInstance(Cluster cluster, Configuration conf) 
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster} and given
+   * {@link Configuration}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param ignored
+   * @param conf job configuration
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   * @deprecated Use {@link #getInstance(Configuration)}
+   */
+  @Deprecated
+  public static Job getInstance(Cluster ignored, Configuration conf) 
       throws IOException {
-    return new Job(cluster, conf);
+    return getInstance(conf);
   }
   
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster} and given
+   * {@link Configuration} and {@link JobStatus}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param cluster cluster
+   * @param status job status
+   * @param conf job configuration
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   */
+  @Private
   public static Job getInstance(Cluster cluster, JobStatus status, 
       Configuration conf) throws IOException {
-    return new Job(cluster, status, conf);
+    Job job = getInstance(status, conf);
+    job.setCluster(cluster);
+    return job;
   }
-  
+
   private void ensureState(JobState state) throws IllegalStateException {
     if (state != this.state) {
       throw new IllegalStateException("Job in state "+ this.state + 
@@ -254,6 +327,10 @@ public class Job extends JobContextImpl 
     updateStatus();
     return status;
   }
+  
+  private void setStatus(JobStatus status) {
+    this.status = status;
+  }
 
   /**
    * Returns the current state of the Job.
@@ -354,6 +431,12 @@ public class Job extends JobContextImpl 
     return status.isRetired();
   }
 
+  /** Only for mocks in unit tests. */
+  @Private
+  private void setCluster(Cluster cluster) {
+    this.cluster = cluster;
+  }
+
   /**
    * Dump stats to screen.
    */
@@ -1055,6 +1138,12 @@ public class Job extends JobContextImpl 
     return cluster != null;
   }
 
+  /** Only for mocking via unit tests. */
+  @Private
+  public JobSubmitter getJobSubmitter(FileSystem fs, 
+      ClientProtocol submitClient) throws IOException {
+    return new JobSubmitter(fs, submitClient);
+  }
   /**
    * Submit the job to the cluster and return immediately.
    * @throws IOException
@@ -1064,8 +1153,8 @@ public class Job extends JobContextImpl 
     ensureState(JobState.DEFINE);
     setUseNewAPI();
     connect();
-    final JobSubmitter submitter = new JobSubmitter(cluster.getFileSystem(),
-        cluster.getClient());
+    final JobSubmitter submitter = 
+        getJobSubmitter(cluster.getFileSystem(), cluster.getClient());
     status = ugi.doAs(new PrivilegedExceptionAction<JobStatus>() {
       public JobStatus run() throws IOException, InterruptedException, 
       ClassNotFoundException {
@@ -1114,7 +1203,7 @@ public class Job extends JobContextImpl 
       throws IOException, InterruptedException {
     String lastReport = null;
     Job.TaskStatusFilter filter;
-    Configuration clientConf = cluster.getConf();
+    Configuration clientConf = getConfiguration();
     filter = Job.getTaskOutputFilter(clientConf);
     JobID jobId = getJobID();
     LOG.info("Running job: " + jobId);

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java Sun Sep 18 02:52:44 2011
@@ -319,7 +319,6 @@ class JobSubmitter {
    * @throws InterruptedException
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   JobStatus submitJobInternal(Job job, Cluster cluster) 
   throws ClassNotFoundException, InterruptedException, IOException {
 

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java Sun Sep 18 02:52:44 2011
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -60,7 +61,11 @@ public class JobContextImpl implements J
   protected final Credentials credentials;
   
   public JobContextImpl(Configuration conf, JobID jobId) {
-    this.conf = new org.apache.hadoop.mapred.JobConf(conf);
+    if (conf instanceof JobConf) {
+      this.conf = (JobConf)conf;
+    } else {
+      this.conf = new JobConf(conf);
+    }
     this.jobId = jobId;
     this.credentials = this.conf.getCredentials();
     try {
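
This instanceof check is where the extra copy named in MAPREDUCE-1788 is avoided: a JobConf handed in (as Job now does) is wrapped as-is, while a plain Configuration is still copied into a new JobConf. A minimal sketch, assuming getConfiguration() returns the wrapped object and using an illustrative class name:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.task.JobContextImpl;

    public class NoExtraCopySketch {
      public static void main(String[] args) {
        JobConf jobConf = new JobConf();
        JobID id = new JobID("sketch", 1);

        // A JobConf is reused directly (the instanceof branch), so the context and
        // the caller share one object; a plain Configuration still gets copied.
        JobContextImpl fromJobConf = new JobContextImpl(jobConf, id);
        JobContextImpl fromPlainConf = new JobContextImpl(new Configuration(), id);

        System.out.println(fromJobConf.getConfiguration() == jobConf);           // true
        System.out.println(fromPlainConf.getConfiguration() instanceof JobConf); // true, but a copy
      }
    }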

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Sun Sep 18 02:52:44 2011
@@ -215,7 +215,7 @@ public class CLI extends Configured impl
     // Submit the request
     try {
       if (submitJobFile != null) {
-        Job job = Job.getInstance(cluster, new JobConf(submitJobFile));
+        Job job = Job.getInstance(new JobConf(submitJobFile));
         job.submit();
         System.out.println("Created job " + job.getJobID());
         exitCode = 0;

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java Sun Sep 18 02:52:44 2011
@@ -64,7 +64,7 @@ public class TestJobMonitorAndPrint exte
     when(cluster.getClient()).thenReturn(clientProtocol);
     JobStatus jobStatus = new JobStatus(new JobID("job_000", 1), 0f, 0f, 0f, 0f, 
         State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname", "tmp-jobfile", "tmp-url");
-    job = new Job(cluster, jobStatus, conf);
+    job = Job.getInstance(cluster, jobStatus, conf);
     job = spy(job);
   }
 

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java Sun Sep 18 02:52:44 2011
@@ -223,23 +223,10 @@ public class YARNRunner implements Clien
       throw new YarnException(e);
     }
 
-    // XXX Remove
-    Path submitJobDir = new Path(jobSubmitDir);
-    FileContext defaultFS = FileContext.getFileContext(conf);
-    Path submitJobFile =
-      defaultFS.makeQualified(JobSubmissionFiles.getJobConfPath(submitJobDir));
-    FSDataInputStream in = defaultFS.open(submitJobFile);
-    conf.addResource(in);
-    // ---
-
     // Construct necessary information to start the MR AM
     ApplicationSubmissionContext appContext = 
       createApplicationSubmissionContext(conf, jobSubmitDir, ts);
     
-    // XXX Remove
-    in.close();
-    // ---
-    
     // Submit to ResourceManager
     ApplicationId applicationId = resMgrDelegate.submitApplication(appContext);
     

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java Sun Sep 18 02:52:44 2011
@@ -139,8 +139,8 @@ public class TestClientRedirect {
     Cluster cluster = new Cluster(conf);
     org.apache.hadoop.mapreduce.JobID jobID =
       new org.apache.hadoop.mapred.JobID("201103121733", 1);
-    org.apache.hadoop.mapreduce.Counters counters = cluster.getJob(jobID)
-        .getCounters();
+    org.apache.hadoop.mapreduce.Counters counters = 
+        cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(amContact);
    

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraChecksum.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraChecksum.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraChecksum.java Sun Sep 18 02:52:44 2011
@@ -74,7 +74,7 @@ public class TeraChecksum extends Config
   }
 
   public int run(String[] args) throws Exception {
-    Job job = Job.getInstance(new Cluster(getConf()), getConf());
+    Job job = Job.getInstance(getConf());
     if (args.length != 2) {
       usage();
       return 2;

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java Sun Sep 18 02:52:44 2011
@@ -280,7 +280,7 @@ public class TeraGen extends Configured 
    */
   public int run(String[] args) 
       throws IOException, InterruptedException, ClassNotFoundException {
-    Job job = Job.getInstance(new Cluster(getConf()), getConf());
+    Job job = Job.getInstance(getConf());
     if (args.length != 2) {
       usage();
       return 2;

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java Sun Sep 18 02:52:44 2011
@@ -280,7 +280,7 @@ public class TeraSort extends Configured
 
   public int run(String[] args) throws Exception {
     LOG.info("starting");
-    Job job = Job.getInstance(new Cluster(getConf()), getConf());
+    Job job = Job.getInstance(getConf());
     Path inputDir = new Path(args[0]);
     Path outputDir = new Path(args[1]);
     boolean useSimplePartitioner = getUseSimplePartitioner(job);

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java Sun Sep 18 02:52:44 2011
@@ -157,7 +157,7 @@ public class TeraValidate extends Config
   }
 
   public int run(String[] args) throws Exception {
-    Job job = Job.getInstance(new Cluster(getConf()), getConf());
+    Job job = Job.getInstance(getConf());
     if (args.length != 2) {
       usage();
       return 1;

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestClusterStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestClusterStatus.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestClusterStatus.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestClusterStatus.java Sun Sep 18 02:52:44 2011
@@ -208,7 +208,7 @@ public class TestClusterStatus extends T
     Configuration conf = mr.createJobConf();
     conf.setInt(JobContext.NUM_MAPS, 1);
 
-    Job job = Job.getInstance(cluster, conf);
+    Job job = Job.getInstance(conf);
     job.setNumReduceTasks(1);
     job.setSpeculativeExecution(false);
     job.setJobSetupCleanupNeeded(false);

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobCounters.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobCounters.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobCounters.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobCounters.java Sun Sep 18 02:52:44 2011
@@ -199,7 +199,7 @@ public class TestJobCounters {
 
   public static Job createJob() throws IOException {
     final Configuration conf = new Configuration();
-    final Job baseJob = Job.getInstance(new Cluster(conf), conf);
+    final Job baseJob = Job.getInstance(conf);
     baseJob.setOutputKeyClass(Text.class);
     baseJob.setOutputValueClass(IntWritable.class);
     baseJob.setMapperClass(NewMapTokenizer.class);

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java Sun Sep 18 02:52:44 2011
@@ -298,7 +298,7 @@ public class TestMapCollection {
       throws Exception {
     Configuration conf = new Configuration();
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
-    Job job = Job.getInstance(new Cluster(conf), conf);
+    Job job = Job.getInstance(conf);
     conf = job.getConfiguration();
     conf.setInt(MRJobConfig.IO_SORT_MB, ioSortMB);
     conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT, Float.toString(spillPer));
@@ -409,7 +409,7 @@ public class TestMapCollection {
     // no writes into the serialization buffer
     Configuration conf = new Configuration();
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
-    Job job = Job.getInstance(new Cluster(conf), conf);
+    Job job = Job.getInstance(conf);
     conf = job.getConfiguration();
     conf.setInt(MRJobConfig.IO_SORT_MB, 1);
     // 2^20 * spill = 14336 bytes available post-spill, at most 896 meta
@@ -427,7 +427,7 @@ public class TestMapCollection {
   public void testLargeRecConcurrent() throws Exception {
     Configuration conf = new Configuration();
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
-    Job job = Job.getInstance(new Cluster(conf), conf);
+    Job job = Job.getInstance(conf);
     conf = job.getConfiguration();
     conf.setInt(MRJobConfig.IO_SORT_MB, 1);
     conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT, Float.toString(.986328125f));
@@ -496,7 +496,7 @@ public class TestMapCollection {
   public void testRandom() throws Exception {
     Configuration conf = new Configuration();
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
-    Job job = Job.getInstance(new Cluster(conf), conf);
+    Job job = Job.getInstance(conf);
     conf = job.getConfiguration();
     conf.setInt(MRJobConfig.IO_SORT_MB, 1);
     conf.setClass("test.mapcollection.class", RandomFactory.class,
@@ -517,7 +517,7 @@ public class TestMapCollection {
   public void testRandomCompress() throws Exception {
     Configuration conf = new Configuration();
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
-    Job job = Job.getInstance(new Cluster(conf), conf);
+    Job job = Job.getInstance(conf);
     conf = job.getConfiguration();
     conf.setInt(MRJobConfig.IO_SORT_MB, 1);
     conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java?rev=1172172&r1=1172171&r2=1172172&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java Sun Sep 18 02:52:44 2011
@@ -234,12 +234,11 @@ public class TestTrackerDistributedCache
     }
     TrackerDistributedCacheManager manager = 
       new FakeTrackerDistributedCacheManager(conf);
-    Cluster cluster = new Cluster(conf);
     String userName = getJobOwnerName();
     File workDir = new File(new Path(TEST_ROOT_DIR, "workdir").toString());
 
     // Configures a job with a regular file
-    Job job1 = Job.getInstance(cluster, conf);
+    Job job1 = Job.getInstance(conf);
     job1.setUser(userName);
     job1.addCacheFile(secondCacheFile.toUri());
     Configuration conf1 = job1.getConfiguration();
@@ -262,7 +261,7 @@ public class TestTrackerDistributedCache
     createPrivateTempFile(thirdCacheFile);
     
     // Configures another job with three regular files.
-    Job job2 = Job.getInstance(cluster, conf);
+    Job job2 = Job.getInstance(conf);
     job2.setUser(userName);
     // add a file that would get failed to localize
     job2.addCacheFile(firstCacheFile.toUri());
@@ -366,7 +365,6 @@ public class TestTrackerDistributedCache
   throws IOException, LoginException, InterruptedException {
     TrackerDistributedCacheManager manager = 
       new TrackerDistributedCacheManager(conf, taskController);
-    Cluster cluster = new Cluster(conf);
     String userName = getJobOwnerName();
     File workDir = new File(TEST_ROOT_DIR, "workdir");
     Path cacheFile = new Path(TEST_ROOT_DIR, "fourthcachefile");
@@ -376,7 +374,7 @@ public class TestTrackerDistributedCache
       createPrivateTempFile(cacheFile);
     }
     
-    Job job1 = Job.getInstance(cluster, conf);
+    Job job1 = Job.getInstance(conf);
     job1.setUser(userName);
     job1.addCacheFile(cacheFile.toUri());
     Configuration conf1 = job1.getConfiguration();