Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2012/02/10 02:49:30 UTC

svn commit: r1242635 [3/10] - in /hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ bin/ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/s...

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java Fri Feb 10 01:49:08 2012
@@ -24,6 +24,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.Calendar;
+import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -46,6 +47,9 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+
 public class JobHistoryUtils {
   
   /**
@@ -110,6 +114,9 @@ public class JobHistoryUtils {
   public static final Pattern TIMESTAMP_DIR_PATTERN = Pattern.compile(TIMESTAMP_DIR_REGEX);
   private static final String TIMESTAMP_DIR_FORMAT = "%04d" + File.separator + "%02d" + File.separator + "%02d";
 
+  private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
+  private static final Joiner JOINER = Joiner.on("");
+
   private static final PathFilter CONF_FILTER = new PathFilter() {
     @Override
     public boolean accept(Path path) {
@@ -478,8 +485,16 @@ public class JobHistoryUtils {
   public static String getHistoryUrl(Configuration conf, ApplicationId appId) 
        throws UnknownHostException {
   //construct the history url for job
-    String hsAddress = conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+    String addr = conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
         JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
+    Iterator<String> it = ADDR_SPLITTER.split(addr).iterator();
+    it.next(); // ignore the bind host
+    String port = it.next();
+    // Use hs address to figure out the host for webapp
+    addr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS,
+        JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
+    String host = ADDR_SPLITTER.split(addr).iterator().next();
+    String hsAddress = JOINER.join(host, ":", port);
     InetSocketAddress address = NetUtils.createSocketAddr(
       hsAddress, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT,
       JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
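
A note on the hunk above: the webapp address (MR_HISTORY_WEBAPP_ADDRESS) may be configured with a wildcard bind host, so it is consulted only for its port; the host is taken from the RPC history address (MR_HISTORY_ADDRESS) instead, and the two are recombined. A minimal, self-contained sketch of that recombination, using hypothetical addresses in place of the configured values:

    import java.util.Iterator;
    import com.google.common.base.Joiner;
    import com.google.common.base.Splitter;

    public class HistoryUrlSketch {
      private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
      private static final Joiner JOINER = Joiner.on("");

      public static void main(String[] args) {
        String webappAddr = "0.0.0.0:19888";      // hypothetical webapp address
        String rpcAddr = "jhs.example.com:10020"; // hypothetical history RPC address

        Iterator<String> it = ADDR_SPLITTER.split(webappAddr).iterator();
        it.next();               // skip the (possibly wildcard) bind host
        String port = it.next(); // keep the webapp port

        String host = ADDR_SPLITTER.split(rpcAddr).iterator().next();
        System.out.println(JOINER.join(host, ":", port)); // jhs.example.com:19888
      }
    }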

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Fri Feb 10 01:49:08 2012
@@ -227,15 +227,23 @@ public class MRApps extends Apps {
   
   public static void setClasspath(Map<String, String> environment,
       Configuration conf) throws IOException {
+    boolean userClassesTakesPrecedence = 
+      conf.getBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false);
+
+    if (!userClassesTakesPrecedence) {
+      MRApps.setMRFrameworkClasspath(environment, conf);
+    }
     Apps.addToEnvironment(
-        environment, 
-        Environment.CLASSPATH.name(), 
+        environment,
+        Environment.CLASSPATH.name(),
         MRJobConfig.JOB_JAR);
     Apps.addToEnvironment(
-        environment, 
+        environment,
         Environment.CLASSPATH.name(),
         Environment.PWD.$() + Path.SEPARATOR + "*");
-    MRApps.setMRFrameworkClasspath(environment, conf);
+    if (userClassesTakesPrecedence) {
+      MRApps.setMRFrameworkClasspath(environment, conf);
+    }
   }
   
   private static final String STAGING_CONSTANT = ".staging";
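
In the setClasspath change above, mapreduce.job.user.classpath.first (MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST) decides whether the framework classpath is appended after job.jar and $PWD/* (flag set) or prepended before them (the default). A small sketch of exercising the new ordering, assuming only the APIs visible in this diff:

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRJobConfig;
    import org.apache.hadoop.mapreduce.v2.util.MRApps;

    public class ClasspathOrderSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
        Map<String, String> env = new HashMap<String, String>();
        MRApps.setClasspath(env, conf);
        // With the flag true the value starts with "job.jar:$PWD/*";
        // with the default (false) the framework entries come first.
        System.out.println(env.get("CLASSPATH"));
      }
    }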

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java Fri Feb 10 01:49:08 2012
@@ -130,13 +130,43 @@ public class TestMRApps {
     Job job = Job.getInstance();
     Map<String, String> environment = new HashMap<String, String>();
     MRApps.setClasspath(environment, job.getConfiguration());
-    assertEquals("job.jar:$PWD/*:$HADOOP_CONF_DIR:" +
+    assertEquals("$HADOOP_CONF_DIR:" +
         "$HADOOP_COMMON_HOME/share/hadoop/common/*:" +
         "$HADOOP_COMMON_HOME/share/hadoop/common/lib/*:" +
         "$HADOOP_HDFS_HOME/share/hadoop/hdfs/*:" +
         "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*:" +
         "$YARN_HOME/share/hadoop/mapreduce/*:" +
-        "$YARN_HOME/share/hadoop/mapreduce/lib/*",
+        "$YARN_HOME/share/hadoop/mapreduce/lib/*:" +
+        "job.jar:$PWD/*",
         environment.get("CLASSPATH"));
   }
+
+  @Test public void testSetClasspathWithUserPrecedence() {
+    Configuration conf = new Configuration();
+    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
+    Map<String, String> env = new HashMap<String, String>();
+    try {
+      MRApps.setClasspath(env, conf);
+    } catch (Exception e) {
+      fail("Got exception while setting classpath");
+    }
+    String env_str = env.get("CLASSPATH");
+    assertSame("MAPREDUCE_JOB_USER_CLASSPATH_FIRST set, but not taking effect!",
+      env_str.indexOf("job.jar"), 0);
+  }
+
+  @Test public void testSetClasspathWithNoUserPrecedence() {
+    Configuration conf = new Configuration();
+    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false);
+    Map<String, String> env = new HashMap<String, String>();
+    try {
+      MRApps.setClasspath(env, conf);
+    } catch (Exception e) {
+      fail("Got exception while setting classpath");
+    }
+    String env_str = env.get("CLASSPATH");
+    assertNotSame("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!",
+      env_str.indexOf("job.jar"), 0);
+  }
+
 }
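
One observation on the new tests: assertSame/assertNotSame compare object identity, so applying them to autoboxed int results works here only because small Integer values are cached by the JVM. An equality-based form states the intent more directly; a sketch:

    assertEquals("MAPREDUCE_JOB_USER_CLASSPATH_FIRST set, but not taking effect!",
        0, env_str.indexOf("job.jar"));
    assertTrue("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!",
        env_str.indexOf("job.jar") != 0);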

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/DistributedCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/DistributedCache.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/DistributedCache.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/DistributedCache.java Fri Feb 10 01:49:08 2012
@@ -119,9 +119,7 @@ import org.apache.hadoop.mapreduce.Job;
  * @see org.apache.hadoop.mapred.JobConf
  * @see org.apache.hadoop.mapred.JobClient
  * @see org.apache.hadoop.mapreduce.Job
- * @deprecated Use methods on {@link Job}.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class DistributedCache extends

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/package-info.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/package-info.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/package-info.java Fri Feb 10 01:49:08 2012
@@ -16,8 +16,4 @@
  * limitations under the License.
  * 
  */ 
-/** 
- * <b>Deprecated.</b> Use {@link org.apache.hadoop.mapreduce.Job} instead.
- */
-@Deprecated
 package org.apache.hadoop.filecache;

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java Fri Feb 10 01:49:08 2012
@@ -62,9 +62,7 @@ import org.apache.hadoop.mapreduce.Clust
  * {@link JobClient#getClusterStatus()}.</p>
  * 
  * @see JobClient
- * @deprecated  Use {@link ClusterMetrics} or {@link TaskTrackerInfo} instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class ClusterStatus implements Writable {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java Fri Feb 10 01:49:08 2012
@@ -18,27 +18,33 @@
 
 package org.apache.hadoop.mapred;
 
+import static org.apache.hadoop.mapreduce.util.CountersStrings.parseEscapedCompactString;
+import static org.apache.hadoop.mapreduce.util.CountersStrings.toEscapedCompactString;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.text.ParseException;
+import java.util.Collection;
+import java.util.Iterator;
 
+import org.apache.commons.collections.IteratorUtils;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.counters.AbstractCounterGroup;
 import org.apache.hadoop.mapreduce.counters.AbstractCounters;
 import org.apache.hadoop.mapreduce.counters.CounterGroupBase;
 import org.apache.hadoop.mapreduce.counters.CounterGroupFactory;
-import org.apache.hadoop.mapreduce.counters.FrameworkCounterGroup;
 import org.apache.hadoop.mapreduce.counters.FileSystemCounterGroup;
+import org.apache.hadoop.mapreduce.counters.FrameworkCounterGroup;
 import org.apache.hadoop.mapreduce.counters.GenericCounter;
 import org.apache.hadoop.mapreduce.counters.Limits;
-import static org.apache.hadoop.mapreduce.util.CountersStrings.*;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
+import org.apache.hadoop.mapreduce.util.CountersStrings;
+
+import com.google.common.collect.Iterators;
 
 /**
  * A set of named counters.
@@ -49,14 +55,14 @@ import static org.apache.hadoop.mapreduc
  *
  * <p><code>Counters</code> are bunched into {@link Group}s, each comprising of
  * counters from a particular <code>Enum</code> class.
- * @deprecated Use {@link org.apache.hadoop.mapreduce.Counters} instead.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Counters
     extends AbstractCounters<Counters.Counter, Counters.Group> {
-
+  
+  public static int MAX_COUNTER_LIMIT = Limits.COUNTERS_MAX;
+  
   public Counters() {
     super(groupFactory);
   }
@@ -74,17 +80,82 @@ public class Counters
     return new Counters(newCounters);
   }
 
+  public synchronized Group getGroup(String groupName) {
+    return super.getGroup(groupName);
+  }
+
+  @SuppressWarnings("unchecked")
+  public synchronized Collection<String> getGroupNames() {
+    return IteratorUtils.toList(super.getGroupNames().iterator());
+  }
+
+  public synchronized String makeCompactString() {
+    return CountersStrings.toEscapedCompactString(this);
+  }
+  
   /**
    * A counter record, comprising its name and value.
    */
-  public interface Counter extends org.apache.hadoop.mapreduce.Counter {
+  public static class Counter implements org.apache.hadoop.mapreduce.Counter {
+    org.apache.hadoop.mapreduce.Counter realCounter;
+
+    Counter(org.apache.hadoop.mapreduce.Counter counter) {
+      this.realCounter = counter;
+    }
+
+    public Counter() {
+      this(new GenericCounter());
+    }
+
+    @SuppressWarnings("deprecation")
+    @Override
+    public void setDisplayName(String displayName) {
+      realCounter.setDisplayName(displayName);
+    }
+
+    @Override
+    public String getName() {
+      return realCounter.getName();
+    }
+
+    @Override
+    public String getDisplayName() {
+      return realCounter.getDisplayName();
+    }
+
+    @Override
+    public long getValue() {
+      return realCounter.getValue();
+    }
+
+    @Override
+    public void setValue(long value) {
+      realCounter.setValue(value);
+    }
+
+    @Override
+    public void increment(long incr) {
+      realCounter.increment(incr);
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+      realCounter.write(out);
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+      realCounter.readFields(in);
+    }
 
     /**
      * Returns the compact stringified version of the counter in the format
      * [(actual-name)(display-name)(value)]
      * @return the stringified result
      */
-    String makeEscapedCompactString();
+    public String makeEscapedCompactString() {
+      return toEscapedCompactString(realCounter);
+    }
 
     /**
      * Checks for (content) equality of two (basic) counters
@@ -93,39 +164,42 @@ public class Counters
      * @deprecated
      */
     @Deprecated
-    boolean contentEquals(Counter counter);
+    public boolean contentEquals(Counter counter) {
+      return realCounter.equals(counter.getUnderlyingCounter());
+    }
 
     /**
      * @return the value of the counter
      */
-    long getCounter();
-  }
-
-  static class OldCounterImpl extends GenericCounter implements Counter {
-
-    OldCounterImpl() {
-    }
-
-    OldCounterImpl(String name, String displayName, long value) {
-      super(name, displayName, value);
+    public long getCounter() {
+      return realCounter.getValue();
     }
 
     @Override
-    public synchronized String makeEscapedCompactString() {
-      return toEscapedCompactString(this);
+    public org.apache.hadoop.mapreduce.Counter getUnderlyingCounter() {
+      return realCounter;
     }
-
-    @Override @Deprecated
-    public boolean contentEquals(Counter counter) {
-      return equals(counter);
+    
+    @Override
+    public synchronized boolean equals(Object genericRight) {
+      if (genericRight instanceof Counter) {
+        synchronized (genericRight) {
+          Counter right = (Counter) genericRight;
+          return getName().equals(right.getName()) &&
+                 getDisplayName().equals(right.getDisplayName()) &&
+                 getValue() == right.getValue();
+        }
+      }
+      return false;
     }
-
+    
     @Override
-    public long getCounter() {
-      return getValue();
+    public int hashCode() {
+      return realCounter.hashCode();
     }
   }
 
+
   /**
    *  <code>Group</code> of counters, comprising of counters from a particular
    *  counter {@link Enum} class.
@@ -133,21 +207,38 @@ public class Counters
    *  <p><code>Group</code>handles localization of the class name and the
    *  counter names.</p>
    */
-  public static interface Group extends CounterGroupBase<Counter> {
-
+  public static class Group implements CounterGroupBase<Counter> {
+    private CounterGroupBase<Counter> realGroup;
+    
+    Group(GenericGroup group) {
+      this.realGroup = group;
+    }
+    Group(FSGroupImpl group) {
+      this.realGroup = group;
+    }
+    
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    Group(FrameworkGroupImpl group) {
+      this.realGroup = group;
+    }
+    
     /**
      * @param counterName the name of the counter
      * @return the value of the specified counter, or 0 if the counter does
      * not exist.
      */
-    long getCounter(String counterName);
+    public long getCounter(String counterName)  {
+      return getCounterValue(realGroup, counterName);
+    }
 
     /**
      * @return the compact stringified version of the group in the format
      * {(actual-name)(display-name)(value)[][][]} where [] are compact strings
      * for the counters within.
      */
-    String makeEscapedCompactString();
+    public String makeEscapedCompactString() {
+      return toEscapedCompactString(realGroup);
+    }
 
     /**
      * Get the counter for the given id and create it if it doesn't exist.
@@ -157,172 +248,184 @@ public class Counters
      * @deprecated use {@link #findCounter(String)} instead
      */
     @Deprecated
-    Counter getCounter(int id, String name);
+    public Counter getCounter(int id, String name) {
+      return findCounter(name);
+    }
 
     /**
      * Get the counter for the given name and create it if it doesn't exist.
      * @param name the internal counter name
      * @return the counter
      */
-    Counter getCounterForName(String name);
-  }
-
-  // All the group impls need this for legacy group interface
-  static long getCounterValue(Group group, String counterName) {
-    Counter counter = group.findCounter(counterName, false);
-    if (counter != null) return counter.getValue();
-    return 0L;
-  }
-
-  // Mix the generic group implementation into the Group interface
-  private static class GenericGroup extends AbstractCounterGroup<Counter>
-                                    implements Group {
-
-    GenericGroup(String name, String displayName, Limits limits) {
-      super(name, displayName, limits);
+    public Counter getCounterForName(String name) {
+      return findCounter(name);
     }
 
     @Override
-    public long getCounter(String counterName) {
-      return getCounterValue(this, counterName);
+    public void write(DataOutput out) throws IOException {
+     realGroup.write(out); 
     }
 
     @Override
-    public String makeEscapedCompactString() {
-      return toEscapedCompactString(this);
+    public void readFields(DataInput in) throws IOException {
+      realGroup.readFields(in);
     }
 
     @Override
-    public Counter getCounter(int id, String name) {
-      return findCounter(name);
+    public Iterator<Counter> iterator() {
+      return realGroup.iterator();
     }
 
     @Override
-    public Counter getCounterForName(String name) {
-      return findCounter(name);
+    public String getName() {
+      return realGroup.getName();
     }
 
     @Override
-    protected Counter newCounter(String counterName, String displayName,
-                                 long value) {
-      return new OldCounterImpl(counterName, displayName, value);
+    public String getDisplayName() {
+      return realGroup.getDisplayName();
     }
 
     @Override
-    protected Counter newCounter() {
-      return new OldCounterImpl();
+    public void setDisplayName(String displayName) {
+      realGroup.setDisplayName(displayName);
     }
-  }
 
-  // Mix the framework group implementation into the Group interface
-  private static class FrameworkGroupImpl<T extends Enum<T>>
-      extends FrameworkCounterGroup<T, Counter> implements Group {
-
-    // Mix the framework counter implmementation into the Counter interface
-    class FrameworkCounterImpl extends FrameworkCounter implements Counter {
-
-      FrameworkCounterImpl(T key) {
-        super(key);
-      }
-
-      @Override
-      public String makeEscapedCompactString() {
-        return toEscapedCompactString(this);
-      }
+    @Override
+    public void addCounter(Counter counter) {
+      realGroup.addCounter(counter);
+    }
 
-      @Override
-      public boolean contentEquals(Counter counter) {
-        return equals(counter);
-      }
+    @Override
+    public Counter addCounter(String name, String displayName, long value) {
+      return realGroup.addCounter(name, displayName, value);
+    }
 
-      @Override
-      public long getCounter() {
-        return getValue();
-      }
+    @Override
+    public Counter findCounter(String counterName, String displayName) {
+      return realGroup.findCounter(counterName, displayName);
     }
 
-    FrameworkGroupImpl(Class<T> cls) {
-      super(cls);
+    @Override
+    public Counter findCounter(String counterName, boolean create) {
+      return realGroup.findCounter(counterName, create);
     }
 
     @Override
-    public long getCounter(String counterName) {
-      return getCounterValue(this, counterName);
+    public Counter findCounter(String counterName) {
+      return realGroup.findCounter(counterName);
     }
 
     @Override
-    public String makeEscapedCompactString() {
-      return toEscapedCompactString(this);
+    public int size() {
+      return realGroup.size();
     }
 
-    @Override @Deprecated
-    public Counter getCounter(int id, String name) {
-      return findCounter(name);
+    @Override
+    public void incrAllCounters(CounterGroupBase<Counter> rightGroup) {
+      realGroup.incrAllCounters(rightGroup);
+    }
+    
+    @Override
+    public CounterGroupBase<Counter> getUnderlyingGroup() {
+      return realGroup;
     }
 
     @Override
-    public Counter getCounterForName(String name) {
-      return findCounter(name);
+    public synchronized boolean equals(Object genericRight) {
+      if (genericRight instanceof CounterGroupBase<?>) {
+        @SuppressWarnings("unchecked")
+        CounterGroupBase<Counter> right = ((CounterGroupBase<Counter>) 
+        genericRight).getUnderlyingGroup();
+        return Iterators.elementsEqual(iterator(), right.iterator());
+      }
+      return false;
     }
 
     @Override
-    protected Counter newCounter(T key) {
-      return new FrameworkCounterImpl(key);
+    public int hashCode() {
+      return realGroup.hashCode();
     }
   }
 
-  // Mix the file system counter group implementation into the Group interface
-  private static class FSGroupImpl extends FileSystemCounterGroup<Counter>
-                                   implements Group {
+  // All the group impls need this for legacy group interface
+  static long getCounterValue(CounterGroupBase<Counter> group, String counterName) {
+    Counter counter = group.findCounter(counterName, false);
+    if (counter != null) return counter.getValue();
+    return 0L;
+  }
 
-    private class FSCounterImpl extends FSCounter implements Counter {
+  // Mix the generic group implementation into the Group interface
+  private static class GenericGroup extends AbstractCounterGroup<Counter> {
 
-      FSCounterImpl(String scheme, FileSystemCounter key) {
-        super(scheme, key);
-      }
+    GenericGroup(String name, String displayName, Limits limits) {
+      super(name, displayName, limits);
+    }
 
-      @Override
-      public String makeEscapedCompactString() {
-        return toEscapedCompactString(this);
-      }
+    @Override
+    protected Counter newCounter(String counterName, String displayName,
+                                 long value) {
+      return new Counter(new GenericCounter(counterName, displayName, value));
+    }
 
-      @Override @Deprecated
-      public boolean contentEquals(Counter counter) {
-        throw new UnsupportedOperationException("Not supported yet.");
-      }
+    @Override
+    protected Counter newCounter() {
+      return new Counter();
+    }
+    
+    @Override
+    public CounterGroupBase<Counter> getUnderlyingGroup() {
+     return this;
+    }
+  }
 
-      @Override
-      public long getCounter() {
-        return getValue();
+  // Mix the framework group implementation into the Group interface
+  private static class FrameworkGroupImpl<T extends Enum<T>>
+      extends FrameworkCounterGroup<T, Counter> {
+
+    // Mix the framework counter implementation into the Counter interface
+    class FrameworkCounterImpl extends FrameworkCounter {
+      FrameworkCounterImpl(T key) {
+        super(key);
       }
 
     }
 
-    @Override
-    protected Counter newCounter(String scheme, FileSystemCounter key) {
-      return new FSCounterImpl(scheme, key);
+    FrameworkGroupImpl(Class<T> cls) {
+      super(cls);
     }
 
     @Override
-    public long getCounter(String counterName) {
-      return getCounterValue(this, counterName);
+    protected Counter newCounter(T key) {
+      return new Counter(new FrameworkCounterImpl(key));
     }
 
     @Override
-    public String makeEscapedCompactString() {
-      return toEscapedCompactString(this);
+    public CounterGroupBase<Counter> getUnderlyingGroup() {
+      return this;
     }
+  }
+
+  // Mix the file system counter group implementation into the Group interface
+  private static class FSGroupImpl extends FileSystemCounterGroup<Counter> {
+
+    private class FSCounterImpl extends FSCounter {
+
+      FSCounterImpl(String scheme, FileSystemCounter key) {
+        super(scheme, key);
+      }
 
-    @Override @Deprecated
-    public Counter getCounter(int id, String name) {
-      return findCounter(name);
     }
 
     @Override
-    public Counter getCounterForName(String name) {
-      return findCounter(name);
+    protected Counter newCounter(String scheme, FileSystemCounter key) {
+      return new Counter(new FSCounterImpl(scheme, key));
     }
 
+    @Override
+    public CounterGroupBase<Counter> getUnderlyingGroup() {
+      return this;
+    }
   }
 
   public synchronized Counter findCounter(String group, String name) {
@@ -347,7 +450,7 @@ public class Counters
     FrameworkGroupFactory<Group> newFrameworkGroupFactory(final Class<T> cls) {
       return new FrameworkGroupFactory<Group>() {
         @Override public Group newGroup(String name) {
-          return new FrameworkGroupImpl<T>(cls); // impl in this package
+          return new Group(new FrameworkGroupImpl<T>(cls)); // impl in this package
         }
       };
     }
@@ -355,12 +458,12 @@ public class Counters
     @Override
     protected Group newGenericGroup(String name, String displayName,
                                     Limits limits) {
-      return new GenericGroup(name, displayName, limits);
+      return new Group(new GenericGroup(name, displayName, limits));
     }
 
     @Override
     protected Group newFileSystemGroup() {
-      return new FSGroupImpl();
+      return new Group(new FSGroupImpl());
     }
   }
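
The net effect of the Counters rewrite above: Counter and Group change from interfaces with several mixed-in implementations to concrete wrapper classes that delegate to an underlying mapreduce counter/group, reachable via getUnderlyingCounter()/getUnderlyingGroup(). Callers of the legacy API should be unaffected; a sketch of a simple round-trip (the group and counter names are hypothetical):

    import org.apache.hadoop.mapred.Counters;

    public class CountersSketch {
      public static void main(String[] args) {
        Counters counters = new Counters();
        Counters.Counter c = counters.findCounter("my-group", "my-counter");
        c.increment(5);
        System.out.println(c.getCounter());                       // 5, via the legacy accessor
        System.out.println(c.getUnderlyingCounter().getValue());  // same value from the wrapped counter
      }
    }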
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileAlreadyExistsException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileAlreadyExistsException.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileAlreadyExistsException.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileAlreadyExistsException.java Fri Feb 10 01:49:08 2012
@@ -27,7 +27,6 @@ import org.apache.hadoop.classification.
  * Used when target file already exists for any operation and 
  * is not configured to be overwritten.  
  */
-@Deprecated // may be removed after 0.23
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FileAlreadyExistsException

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java Fri Feb 10 01:49:08 2012
@@ -54,10 +54,7 @@ import org.apache.hadoop.util.StringUtil
  * Subclasses of <code>FileInputFormat</code> can also override the 
  * {@link #isSplitable(FileSystem, Path)} method to ensure input-files are
  * not split-up and are processed as a whole by {@link Mapper}s.
- * @deprecated Use {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}
- *  instead.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileOutputCommitter.java Fri Feb 10 01:49:08 2012
@@ -19,14 +19,12 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.net.URI;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
 /** An {@link OutputCommitter} that commits files specified 
@@ -42,280 +40,147 @@ public class FileOutputCommitter extends
   /**
    * Temporary directory name 
    */
-  public static final String TEMP_DIR_NAME = "_temporary";
-  public static final String SUCCEEDED_FILE_NAME = "_SUCCESS";
-  static final String SUCCESSFUL_JOB_OUTPUT_DIR_MARKER = 
-    "mapreduce.fileoutputcommitter.marksuccessfuljobs";
-
-  public void setupJob(JobContext context) throws IOException {
+  public static final String TEMP_DIR_NAME = 
+    org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter.PENDING_DIR_NAME;
+  public static final String SUCCEEDED_FILE_NAME = 
+    org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter.SUCCEEDED_FILE_NAME;
+  static final String SUCCESSFUL_JOB_OUTPUT_DIR_MARKER =
+    org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter.SUCCESSFUL_JOB_OUTPUT_DIR_MARKER;
+  
+  private static Path getOutputPath(JobContext context) {
     JobConf conf = context.getJobConf();
-    Path outputPath = FileOutputFormat.getOutputPath(conf);
-    if (outputPath != null) {
-      Path tmpDir = 
-          new Path(outputPath, getJobAttemptBaseDirName(context) + 
-              Path.SEPARATOR + FileOutputCommitter.TEMP_DIR_NAME);
-      FileSystem fileSys = tmpDir.getFileSystem(conf);
-      if (!fileSys.mkdirs(tmpDir)) {
-        LOG.error("Mkdirs failed to create " + tmpDir.toString());
-      }
-    }
-  }
-
-  // True if the job requires output.dir marked on successful job.
-  // Note that by default it is set to true.
-  private boolean shouldMarkOutputDir(JobConf conf) {
-    return conf.getBoolean(SUCCESSFUL_JOB_OUTPUT_DIR_MARKER, true);
+    return FileOutputFormat.getOutputPath(conf);
   }
   
-  public void commitJob(JobContext context) throws IOException {
-    //delete the task temp directory from the current jobtempdir
+  private static Path getOutputPath(TaskAttemptContext context) {
     JobConf conf = context.getJobConf();
-    Path outputPath = FileOutputFormat.getOutputPath(conf);
-    if (outputPath != null) {
-      FileSystem outputFileSystem = outputPath.getFileSystem(conf);
-      Path tmpDir = new Path(outputPath, getJobAttemptBaseDirName(context) +
-          Path.SEPARATOR + FileOutputCommitter.TEMP_DIR_NAME);
-      FileSystem fileSys = tmpDir.getFileSystem(context.getConfiguration());
-      if (fileSys.exists(tmpDir)) {
-        fileSys.delete(tmpDir, true);
-      } else {
-        LOG.warn("Task temp dir could not be deleted " + tmpDir);
-      }
-
-      //move the job output to final place
-      Path jobOutputPath = 
-          new Path(outputPath, getJobAttemptBaseDirName(context));
-      moveJobOutputs(outputFileSystem, 
-          jobOutputPath, outputPath, jobOutputPath);
-
-      // delete the _temporary folder in the output folder
-      cleanupJob(context);
-      // check if the output-dir marking is required
-      if (shouldMarkOutputDir(context.getJobConf())) {
-        // create a _success file in the output folder
-        markOutputDirSuccessful(context);
-      }
+    return FileOutputFormat.getOutputPath(conf);
+  }
+  
+  private org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter wrapped = null;
+  
+  private org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter 
+  getWrapped(JobContext context) throws IOException {
+    if(wrapped == null) {
+      wrapped = new org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter(
+          getOutputPath(context), context);
     }
+    return wrapped;
   }
   
-  // Create a _success file in the job's output folder
-  private void markOutputDirSuccessful(JobContext context) throws IOException {
-    JobConf conf = context.getJobConf();
-    // get the o/p path
-    Path outputPath = FileOutputFormat.getOutputPath(conf);
-    if (outputPath != null) {
-      // get the filesys
-      FileSystem fileSys = outputPath.getFileSystem(conf);
-      // create a file in the output folder to mark the job completion
-      Path filePath = new Path(outputPath, SUCCEEDED_FILE_NAME);
-      fileSys.create(filePath).close();
+  private org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter 
+  getWrapped(TaskAttemptContext context) throws IOException {
+    if(wrapped == null) {
+      wrapped = new org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter(
+          getOutputPath(context), context);
     }
+    return wrapped;
   }
-
-  private void moveJobOutputs(FileSystem fs, final Path origJobOutputPath,
-      Path finalOutputDir, Path jobOutput) throws IOException {
-    LOG.debug("Told to move job output from " + jobOutput
-        + " to " + finalOutputDir + 
-        " and orig job output path is " + origJobOutputPath);  
-    if (fs.isFile(jobOutput)) {
-      Path finalOutputPath = 
-          getFinalPath(fs, finalOutputDir, jobOutput, origJobOutputPath);
-      if (!fs.rename(jobOutput, finalOutputPath)) {
-        if (!fs.delete(finalOutputPath, true)) {
-          throw new IOException("Failed to delete earlier output of job");
-        }
-        if (!fs.rename(jobOutput, finalOutputPath)) {
-          throw new IOException("Failed to save output of job");
-        }
-      }
-      LOG.debug("Moved job output file from " + jobOutput + " to " + 
-          finalOutputPath);
-    } else if (fs.getFileStatus(jobOutput).isDirectory()) {
-      LOG.debug("Job output file " + jobOutput + " is a dir");      
-      FileStatus[] paths = fs.listStatus(jobOutput);
-      Path finalOutputPath = 
-          getFinalPath(fs, finalOutputDir, jobOutput, origJobOutputPath);
-      fs.mkdirs(finalOutputPath);
-      LOG.debug("Creating dirs along job output path " + finalOutputPath);
-      if (paths != null) {
-        for (FileStatus path : paths) {
-          moveJobOutputs(fs, origJobOutputPath, finalOutputDir, path.getPath());
-        }
-      }
+  
+  /**
+   * Compute the path where the output of a given job attempt will be placed. 
+   * @param context the context of the job.  This is used to get the
+   * application attempt id.
+   * @return the path to store job attempt data.
+   */
+  @Private
+  Path getJobAttemptPath(JobContext context) {
+    Path out = getOutputPath(context);
+    return out == null ? null : 
+      org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+        .getJobAttemptPath(context, out);
+  }
+
+  @Private
+  Path getTaskAttemptPath(TaskAttemptContext context) throws IOException {
+    Path out = getOutputPath(context);
+    return out == null ? null : getTaskAttemptPath(context, out);
+  }
+
+  private Path getTaskAttemptPath(TaskAttemptContext context, Path out) throws IOException {
+    Path workPath = FileOutputFormat.getWorkOutputPath(context.getJobConf());
+    if(workPath == null && out != null) {
+      return org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+      .getTaskAttemptPath(context, out);
     }
+    return workPath;
+  }
+  
+  /**
+   * Compute the path where the output of a committed task is stored until
+   * the entire job is committed.
+   * @param context the context of the task attempt
+   * @return the path where the output of a committed task is stored until
+   * the entire job is committed.
+   */
+  @Private
+  Path getCommittedTaskPath(TaskAttemptContext context) {
+    Path out = getOutputPath(context);
+    return out == null ? null : 
+      org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+        .getCommittedTaskPath(context, out);
+  }
+
+  public Path getWorkPath(TaskAttemptContext context, Path outputPath) 
+  throws IOException {
+    return outputPath == null ? null : getTaskAttemptPath(context, outputPath);
+  }
+  
+  @Override
+  public void setupJob(JobContext context) throws IOException {
+    getWrapped(context).setupJob(context);
+  }
+  
+  @Override
+  public void commitJob(JobContext context) throws IOException {
+    getWrapped(context).commitJob(context);
   }
   
   @Override
   @Deprecated
   public void cleanupJob(JobContext context) throws IOException {
-    JobConf conf = context.getJobConf();
-    // do the clean up of temporary directory
-    Path outputPath = FileOutputFormat.getOutputPath(conf);
-    if (outputPath != null) {
-      Path tmpDir = new Path(outputPath, FileOutputCommitter.TEMP_DIR_NAME);
-      FileSystem fileSys = tmpDir.getFileSystem(conf);
-      context.getProgressible().progress();
-      if (fileSys.exists(tmpDir)) {
-        fileSys.delete(tmpDir, true);
-      } else {
-        LOG.warn("Output Path is Null in cleanup");
-      }
-    }
+    getWrapped(context).cleanupJob(context);
   }
 
   @Override
   public void abortJob(JobContext context, int runState) 
   throws IOException {
-    // simply delete the _temporary dir from the o/p folder of the job
-    cleanupJob(context);
+    JobStatus.State state;
+    if(runState == JobStatus.State.RUNNING.getValue()) {
+      state = JobStatus.State.RUNNING;
+    } else if(runState == JobStatus.State.SUCCEEDED.getValue()) {
+      state = JobStatus.State.SUCCEEDED;
+    } else if(runState == JobStatus.State.FAILED.getValue()) {
+      state = JobStatus.State.FAILED;
+    } else if(runState == JobStatus.State.PREP.getValue()) {
+      state = JobStatus.State.PREP;
+    } else if(runState == JobStatus.State.KILLED.getValue()) {
+      state = JobStatus.State.KILLED;
+    } else {
+      throw new IllegalArgumentException(runState+" is not a valid runState.");
+    }
+    getWrapped(context).abortJob(context, state);
   }
   
+  @Override
   public void setupTask(TaskAttemptContext context) throws IOException {
-    // FileOutputCommitter's setupTask doesn't do anything. Because the
-    // temporary task directory is created on demand when the 
-    // task is writing.
+    getWrapped(context).setupTask(context);
   }
-		  
-  public void commitTask(TaskAttemptContext context) 
-  throws IOException {
-    Path taskOutputPath = getTempTaskOutputPath(context);
-    TaskAttemptID attemptId = context.getTaskAttemptID();
-    JobConf job = context.getJobConf();
-    if (taskOutputPath != null) {
-      FileSystem fs = taskOutputPath.getFileSystem(job);
-      context.getProgressible().progress();
-      if (fs.exists(taskOutputPath)) {
-        // Move the task outputs to the current job attempt output dir
-        JobConf conf = context.getJobConf();
-        Path outputPath = FileOutputFormat.getOutputPath(conf);
-        FileSystem outputFileSystem = outputPath.getFileSystem(conf);
-        Path jobOutputPath = new Path(outputPath, getJobTempDirName(context));
-        moveTaskOutputs(context, outputFileSystem, jobOutputPath, 
-            taskOutputPath);
-
-        // Delete the temporary task-specific output directory
-        if (!fs.delete(taskOutputPath, true)) {
-          LOG.info("Failed to delete the temporary output" + 
-          " directory of task: " + attemptId + " - " + taskOutputPath);
-        }
-        LOG.info("Saved output of task '" + attemptId + "' to " + 
-                 jobOutputPath);
-      }
-    }
-  }
-		  
-  private void moveTaskOutputs(TaskAttemptContext context,
-                               FileSystem fs,
-                               Path jobOutputDir,
-                               Path taskOutput) 
-  throws IOException {
-    TaskAttemptID attemptId = context.getTaskAttemptID();
-    context.getProgressible().progress();
-    LOG.debug("Told to move taskoutput from " + taskOutput
-        + " to " + jobOutputDir);    
-    if (fs.isFile(taskOutput)) {
-      Path finalOutputPath = getFinalPath(fs, jobOutputDir, taskOutput, 
-                                          getTempTaskOutputPath(context));
-      if (!fs.rename(taskOutput, finalOutputPath)) {
-        if (!fs.delete(finalOutputPath, true)) {
-          throw new IOException("Failed to delete earlier output of task: " + 
-                                 attemptId);
-        }
-        if (!fs.rename(taskOutput, finalOutputPath)) {
-          throw new IOException("Failed to save output of task: " + 
-        		  attemptId);
-        }
-      }
-      LOG.debug("Moved " + taskOutput + " to " + finalOutputPath);
-    } else if(fs.getFileStatus(taskOutput).isDirectory()) {
-      LOG.debug("Taskoutput " + taskOutput + " is a dir");
-      FileStatus[] paths = fs.listStatus(taskOutput);
-      Path finalOutputPath = getFinalPath(fs, jobOutputDir, taskOutput, 
-	          getTempTaskOutputPath(context));
-      fs.mkdirs(finalOutputPath);
-      LOG.debug("Creating dirs along path " + finalOutputPath);
-      if (paths != null) {
-        for (FileStatus path : paths) {
-          moveTaskOutputs(context, fs, jobOutputDir, path.getPath());
-        }
-      }
-    }
+  
+  @Override
+  public void commitTask(TaskAttemptContext context) throws IOException {
+    getWrapped(context).commitTask(context, getTaskAttemptPath(context));
   }
 
+  @Override
   public void abortTask(TaskAttemptContext context) throws IOException {
-    Path taskOutputPath =  getTempTaskOutputPath(context);
-    if (taskOutputPath != null) {
-      FileSystem fs = taskOutputPath.getFileSystem(context.getJobConf());
-      context.getProgressible().progress();
-      fs.delete(taskOutputPath, true);
-    }
-  }
-
-  @SuppressWarnings("deprecation")
-  private Path getFinalPath(FileSystem fs, Path jobOutputDir, Path taskOutput, 
-                            Path taskOutputPath) throws IOException {
-    URI taskOutputUri = taskOutput.makeQualified(fs).toUri();
-    URI taskOutputPathUri = taskOutputPath.makeQualified(fs).toUri();
-    URI relativePath = taskOutputPathUri.relativize(taskOutputUri);
-    if (taskOutputUri == relativePath) { 
-      //taskOutputPath is not a parent of taskOutput
-      throw new IOException("Can not get the relative path: base = " + 
-          taskOutputPathUri + " child = " + taskOutputUri);
-    }
-    if (relativePath.getPath().length() > 0) {
-      return new Path(jobOutputDir, relativePath.getPath());
-    } else {
-      return jobOutputDir;
-    }
+    getWrapped(context).abortTask(context, getTaskAttemptPath(context));
   }
 
+  @Override
   public boolean needsTaskCommit(TaskAttemptContext context) 
   throws IOException {
-    Path taskOutputPath = getTempTaskOutputPath(context);
-    if (taskOutputPath != null) {
-      context.getProgressible().progress();
-      // Get the file-system for the task output directory
-      FileSystem fs = taskOutputPath.getFileSystem(context.getJobConf());
-      // since task output path is created on demand, 
-      // if it exists, task needs a commit
-      if (fs.exists(taskOutputPath)) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  Path getTempTaskOutputPath(TaskAttemptContext taskContext) 
-      throws IOException {
-    JobConf conf = taskContext.getJobConf();
-    Path outputPath = FileOutputFormat.getOutputPath(conf);
-    if (outputPath != null) {
-      Path p = new Path(outputPath,
-                     (FileOutputCommitter.TEMP_DIR_NAME + Path.SEPARATOR +
-                      "_" + taskContext.getTaskAttemptID().toString()));
-      FileSystem fs = p.getFileSystem(conf);
-      return p.makeQualified(fs);
-    }
-    return null;
-  }
-  
-  Path getWorkPath(TaskAttemptContext taskContext, Path basePath) 
-  throws IOException {
-    // ${mapred.out.dir}/_temporary
-    Path jobTmpDir = new Path(basePath, FileOutputCommitter.TEMP_DIR_NAME);
-    FileSystem fs = jobTmpDir.getFileSystem(taskContext.getJobConf());
-    if (!fs.exists(jobTmpDir)) {
-      throw new IOException("The temporary job-output directory " + 
-          jobTmpDir.toString() + " doesn't exist!"); 
-    }
-    // ${mapred.out.dir}/_temporary/_${taskid}
-    String taskid = taskContext.getTaskAttemptID().toString();
-    Path taskTmpDir = new Path(jobTmpDir, "_" + taskid);
-    if (!fs.mkdirs(taskTmpDir)) {
-      throw new IOException("Mkdirs failed to create " 
-          + taskTmpDir.toString());
-    }
-    return taskTmpDir;
+    return getWrapped(context).needsTaskCommit(context, getTaskAttemptPath(context));
   }
   
   @Override
@@ -326,54 +191,6 @@ public class FileOutputCommitter extends
   @Override
   public void recoverTask(TaskAttemptContext context)
       throws IOException {
-    Path outputPath = FileOutputFormat.getOutputPath(context.getJobConf());
-    context.progress();
-    Path jobOutputPath = new Path(outputPath, getJobTempDirName(context));
-    int previousAttempt =         
-        context.getConfiguration().getInt(
-            MRConstants.APPLICATION_ATTEMPT_ID, 0) - 1;
-    if (previousAttempt < 0) {
-      LOG.warn("Cannot recover task output for first attempt...");
-      return;
-    }
-
-    FileSystem outputFileSystem = 
-        outputPath.getFileSystem(context.getJobConf());
-    Path pathToRecover = 
-        new Path(outputPath, getJobAttemptBaseDirName(previousAttempt));
-    if (outputFileSystem.exists(pathToRecover)) {
-      // Move the task outputs to their final place
-      LOG.debug("Trying to recover task from " + pathToRecover
-          + " into " + jobOutputPath);
-      moveJobOutputs(outputFileSystem, 
-          pathToRecover, jobOutputPath, pathToRecover);
-      LOG.info("Saved output of job to " + jobOutputPath);
-    }
-  }
-
-  protected static String getJobAttemptBaseDirName(JobContext context) {
-    int appAttemptId = 
-        context.getJobConf().getInt(
-            MRConstants.APPLICATION_ATTEMPT_ID, 0);
-    return getJobAttemptBaseDirName(appAttemptId);
-  }
-
-  protected static String getJobTempDirName(TaskAttemptContext context) {
-    int appAttemptId = 
-        context.getJobConf().getInt(
-            MRConstants.APPLICATION_ATTEMPT_ID, 0);
-    return getJobAttemptBaseDirName(appAttemptId);
-  }
-
-  protected static String getJobAttemptBaseDirName(int appAttemptId) {
-    return FileOutputCommitter.TEMP_DIR_NAME + Path.SEPARATOR + 
-      + appAttemptId;
-  }
-
-  protected static String getTaskAttemptBaseDirName(
-      TaskAttemptContext context) {
-    return getJobTempDirName(context) + Path.SEPARATOR + 
-      FileOutputCommitter.TEMP_DIR_NAME + Path.SEPARATOR +
-      "_" + context.getTaskAttemptID().toString();
+    getWrapped(context).recoverTask(context);
   }
 }
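
FileOutputCommitter in the old mapred API is now a thin shim: it lazily constructs an org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter and forwards every lifecycle call to it. The one piece of translation left in this class is abortJob's mapping from the legacy int runState to JobStatus.State; a sketch of an equivalent mapping driven by the enum's own values (assuming, as the if/else chain does, that every valid int has a matching state):

    import org.apache.hadoop.mapreduce.JobStatus;

    public class RunStateSketch {
      static JobStatus.State toState(int runState) {
        for (JobStatus.State s : JobStatus.State.values()) {
          if (s.getValue() == runState) {
            return s;
          }
        }
        throw new IllegalArgumentException(runState + " is not a valid runState.");
      }
    }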

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileSplit.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileSplit.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileSplit.java Fri Feb 10 01:49:08 2012
@@ -29,10 +29,7 @@ import org.apache.hadoop.fs.Path;
 /** A section of an input file.  Returned by {@link
  * InputFormat#getSplits(JobConf, int)} and passed to
  * {@link InputFormat#getRecordReader(InputSplit,JobConf,Reporter)}. 
- * @deprecated Use {@link org.apache.hadoop.mapreduce.lib.input.FileSplit}
- *  instead.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FileSplit extends org.apache.hadoop.mapreduce.InputSplit 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ID.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ID.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ID.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ID.java Fri Feb 10 01:49:08 2012
@@ -30,7 +30,6 @@ import org.apache.hadoop.classification.
  * @see TaskID
  * @see TaskAttemptID
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public abstract class ID extends org.apache.hadoop.mapreduce.ID {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputFormat.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputFormat.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputFormat.java Fri Feb 10 01:49:08 2012
@@ -63,9 +63,7 @@ import org.apache.hadoop.fs.FileSystem;
  * @see RecordReader
  * @see JobClient
  * @see FileInputFormat
- * @deprecated Use {@link org.apache.hadoop.mapreduce.InputFormat} instead.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public interface InputFormat<K, V> {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputSplit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputSplit.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputSplit.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InputSplit.java Fri Feb 10 01:49:08 2012
@@ -34,9 +34,7 @@ import org.apache.hadoop.io.Writable;
  * 
  * @see InputFormat
  * @see RecordReader
- * @deprecated Use {@link org.apache.hadoop.mapreduce.InputSplit} instead.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public interface InputSplit extends Writable {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java Fri Feb 10 01:49:08 2012
@@ -29,6 +29,9 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.ClusterMetrics;
@@ -40,13 +43,10 @@ import org.apache.hadoop.mapreduce.filec
 import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenRenewer;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -132,9 +132,7 @@ import org.apache.hadoop.util.ToolRunner
  * @see ClusterStatus
  * @see Tool
  * @see DistributedCache
- * @deprecated Use {@link Job} and {@link Cluster} instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class JobClient extends CLI {
@@ -147,7 +145,7 @@ public class JobClient extends CLI {
    */
   private boolean getDelegationTokenCalled = false;
   /* notes the renewer that will renew the delegation token */
-  private Text dtRenewer = null;
+  private String dtRenewer = null;
   /* do we need a HS delegation token for this client */
   static final String HS_DELEGATION_TOKEN_REQUIRED 
       = "mapreduce.history.server.delegationtoken.required";
@@ -600,7 +598,7 @@ public class JobClient extends CLI {
       if (getDelegationTokenCalled) {
         conf.setBoolean(HS_DELEGATION_TOKEN_REQUIRED, getDelegationTokenCalled);
         getDelegationTokenCalled = false;
-        conf.set(HS_DELEGATION_TOKEN_RENEWER, dtRenewer.toString());
+        conf.set(HS_DELEGATION_TOKEN_RENEWER, dtRenewer);
         dtRenewer = null;
       }
       Job job = clientUgi.doAs(new PrivilegedExceptionAction<Job> () {
@@ -1204,7 +1202,7 @@ public class JobClient extends CLI {
   public Token<DelegationTokenIdentifier> 
     getDelegationToken(final Text renewer) throws IOException, InterruptedException {
     getDelegationTokenCalled = true;
-    dtRenewer = renewer;
+    dtRenewer = renewer.toString();
     return clientUgi.doAs(new 
         PrivilegedExceptionAction<Token<DelegationTokenIdentifier>>() {
       public Token<DelegationTokenIdentifier> run() throws IOException, 

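The dtRenewer change above converts the renewer to a String once, at the point getDelegationToken() records it, so the later conf.set(HS_DELEGATION_TOKEN_RENEWER, dtRenewer) needs no toString() on a held Text. Caller code is unaffected; a hedged sketch of the caller side (the renewer name "yarn" is illustrative):

    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
    import org.apache.hadoop.security.token.Token;

    public class TokenFetch {
      // Fetches a history-server delegation token; JobClient records the
      // renewer internally (now as a String) and writes it into the job
      // configuration at submit time.
      public static Token<DelegationTokenIdentifier> fetch(JobConf conf)
          throws IOException, InterruptedException {
        JobClient client = new JobClient(conf);
        return client.getDelegationToken(new Text("yarn"));
      }
    }
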
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java Fri Feb 10 01:49:08 2012
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
 
 
 import java.io.IOException;
-
 import java.net.URL;
 import java.net.URLDecoder;
 import java.util.Enumeration;
@@ -28,24 +27,26 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.mapreduce.filecache.DistributedCache;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.compress.CompressionCodec;
-
+import org.apache.hadoop.mapred.lib.HashPartitioner;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
-import org.apache.hadoop.mapred.lib.HashPartitioner;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedComparator;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -107,9 +108,7 @@ import org.apache.log4j.Level;
  * @see ClusterStatus
  * @see Tool
  * @see DistributedCache
- * @deprecated Use {@link Configuration} instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class JobConf extends Configuration {

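With the @deprecated tag gone from JobConf, driving a job through the classic API is warning-free again. A minimal driver sketch (WordCountDriver, WordCountMapper and WordCountReducer are illustrative names, not classes from this commit):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;

    public class WordCountDriver {
      public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(WordCountDriver.class);
        conf.setJobName("wordcount");
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);
        // WordCountMapper/WordCountReducer are assumed to exist elsewhere.
        conf.setMapperClass(WordCountMapper.class);
        conf.setReducerClass(WordCountReducer.class);
        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));
        JobClient.runJob(conf);
      }
    }
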
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConfigurable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConfigurable.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConfigurable.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConfigurable.java Fri Feb 10 01:49:08 2012
@@ -22,7 +22,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 
 /** Something that may be configured. */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public interface JobConfigurable {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContext.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContext.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContext.java Fri Feb 10 01:49:08 2012
@@ -22,10 +22,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.Progressable;
 
-/**
- * @deprecated Use {@link org.apache.hadoop.mapreduce.JobContext} instead.
- */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public interface JobContext extends org.apache.hadoop.mapreduce.JobContext {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContextImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContextImpl.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContextImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobContextImpl.java Fri Feb 10 01:49:08 2012
@@ -21,10 +21,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.Progressable;
 
-/**
- * @deprecated Use {@link org.apache.hadoop.mapreduce.JobContext} instead.
- */
-@Deprecated
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class JobContextImpl 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobID.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobID.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobID.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobID.java Fri Feb 10 01:49:08 2012
@@ -41,7 +41,6 @@ import org.apache.hadoop.classification.
  * @see TaskID
  * @see TaskAttemptID
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class JobID extends org.apache.hadoop.mapreduce.JobID {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobPriority.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobPriority.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobPriority.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobPriority.java Fri Feb 10 01:49:08 2012
@@ -22,9 +22,7 @@ import org.apache.hadoop.classification.
 
 /**
  * Used to describe the priority of the running job. 
- * @deprecated Use {@link org.apache.hadoop.mapreduce.JobPriority} instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public enum JobPriority {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java Fri Feb 10 01:49:08 2012
@@ -29,9 +29,7 @@ import org.apache.hadoop.mapreduce.Queue
 /**
  * Class that contains the information regarding the Job Queues which are 
  * maintained by the Hadoop Map/Reduce framework.
- * @deprecated Use {@link QueueInfo} instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class JobQueueInfo extends QueueInfo {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobStatus.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobStatus.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobStatus.java Fri Feb 10 01:49:08 2012
@@ -29,9 +29,7 @@ import org.apache.hadoop.security.author
  * not intended to be a comprehensive piece of data.
  * For that, look at JobProfile.
  *************************************************
- *@deprecated Use {@link org.apache.hadoop.mapreduce.JobStatus} instead
  **/
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class JobStatus extends org.apache.hadoop.mapreduce.JobStatus {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueLineRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueLineRecordReader.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueLineRecordReader.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueLineRecordReader.java Fri Feb 10 01:49:08 2012
@@ -31,12 +31,7 @@ import org.apache.hadoop.io.Text;
  * separator character. The separator can be specified in the config file
  * under the attribute name mapreduce.input.keyvaluelinerecordreader.key.value.separator. The default
  * separator is the tab character ('\t').
- * 
- * @deprecated Use 
- * {@link org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader} 
- * instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class KeyValueLineRecordReader implements RecordReader<Text, Text> {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueTextInputFormat.java Fri Feb 10 01:49:08 2012
@@ -34,12 +34,7 @@ import org.apache.hadoop.io.compress.Spl
  * Either linefeed or carriage-return is used to signal the end of a line. Each line
  * is divided into key and value parts by a separator byte. If no such byte
  * exists, the key will be the entire line and the value will be empty.
- * 
- * @deprecated Use 
- * {@link org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat} 
- * instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class KeyValueTextInputFormat extends FileInputFormat<Text, Text>

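The separator described above comes straight out of the job configuration, so switching it is one conf.set call. A sketch in the classic API (the comma is just an example separator):

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.KeyValueTextInputFormat;

    public class CsvInputSetup {
      // Splits each input line at the first ',' instead of the default tab.
      public static JobConf configure(JobConf conf) {
        conf.set(
            "mapreduce.input.keyvaluelinerecordreader.key.value.separator", ",");
        conf.setInputFormat(KeyValueTextInputFormat.class);
        return conf;
      }
    }
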
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java Fri Feb 10 01:49:08 2012
@@ -41,10 +41,7 @@ import org.apache.commons.logging.Log;
 
 /**
  * Treats keys as offsets in the file and values as lines.
- * @deprecated Use 
- *   {@link org.apache.hadoop.mapreduce.lib.input.LineRecordReader} instead.
  */
-@Deprecated
 @InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"})
 @InterfaceStability.Unstable
 public class LineRecordReader implements RecordReader<LongWritable, Text> {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MROutputFiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MROutputFiles.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MROutputFiles.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MROutputFiles.java Fri Feb 10 01:49:08 2012
@@ -23,7 +23,6 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRConfig;

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapFileOutputFormat.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapFileOutputFormat.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapFileOutputFormat.java Fri Feb 10 01:49:08 2012
@@ -36,10 +36,7 @@ import org.apache.hadoop.util.Progressab
 import org.apache.hadoop.util.ReflectionUtils;
 
 /** An {@link OutputFormat} that writes {@link MapFile}s.
- * @deprecated Use 
- * {@link org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat} instead
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class MapFileOutputFormat 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapReduceBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapReduceBase.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapReduceBase.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapReduceBase.java Fri Feb 10 01:49:08 2012
@@ -23,7 +23,6 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Closeable;
-import org.apache.hadoop.mapred.JobConfigurable;
 
 /** 
  * Base class for {@link Mapper} and {@link Reducer} implementations.
@@ -31,7 +30,6 @@ import org.apache.hadoop.mapred.JobConfi
  * <p>Provides default no-op implementations for a few methods; most non-trivial
  * applications need to override some of them.</p>
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class MapReduceBase implements Closeable, JobConfigurable {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapRunnable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapRunnable.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapRunnable.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapRunnable.java Fri Feb 10 01:49:08 2012
@@ -30,9 +30,7 @@ import org.apache.hadoop.classification.
  * control on map processing, e.g. multi-threaded or asynchronous mappers.</p>
  * 
  * @see Mapper
- * @deprecated Use {@link org.apache.hadoop.mapreduce.Mapper} instead.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public interface MapRunnable<K1, V1, K2, V2>

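A MapRunnable drives the RecordReader itself rather than having the framework push records at it, which is what makes multi-threaded or asynchronous mapping possible. A single-threaded sketch in the spirit of the framework's default runner (SequentialMapRunner is an illustrative name):

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.MapRunnable;
    import org.apache.hadoop.mapred.Mapper;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.RecordReader;
    import org.apache.hadoop.mapred.Reporter;
    import org.apache.hadoop.util.ReflectionUtils;

    // Single-threaded sketch of a custom runner; a multi-threaded variant
    // would hand each record to a worker pool instead of mapping inline.
    public class SequentialMapRunner<K1, V1, K2, V2>
        implements MapRunnable<K1, V1, K2, V2> {
      private JobConf job;

      public void configure(JobConf job) { this.job = job; }

      @SuppressWarnings("unchecked")
      public void run(RecordReader<K1, V1> input,
                      OutputCollector<K2, V2> output,
                      Reporter reporter) throws IOException {
        Mapper<K1, V1, K2, V2> mapper =
            ReflectionUtils.newInstance(job.getMapperClass(), job);
        try {
          K1 key = input.createKey();
          V1 value = input.createValue();
          // The runner, not the framework, pulls records from the reader.
          while (input.next(key, value)) {
            mapper.map(key, value, output, reporter);
          }
        } finally {
          mapper.close();
        }
      }
    }

It would be registered on a job with conf.setMapRunnerClass(SequentialMapRunner.class).
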
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java Fri Feb 10 01:49:08 2012
@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.FSDataInputS
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystem.Statistics;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RawLocalFileSystem;
@@ -142,7 +141,7 @@ class MapTask extends Task {
     private TaskReporter reporter;
     private long bytesInPrev = -1;
     private long bytesInCurr = -1;
-    private final Statistics fsStats;
+    private final List<Statistics> fsStats;
     
     TrackedRecordReader(TaskReporter reporter, JobConf job) 
       throws IOException{
@@ -150,7 +149,7 @@ class MapTask extends Task {
       fileInputByteCounter = reporter.getCounter(FileInputFormatCounter.BYTES_READ);
       this.reporter = reporter;
       
-      Statistics matchedStats = null;
+      List<Statistics> matchedStats = null;
       if (this.reporter.getInputSplit() instanceof FileSplit) {
         matchedStats = getFsStatistics(((FileSplit) this.reporter
             .getInputSplit()).getPath(), job);
@@ -211,8 +210,13 @@ class MapTask extends Task {
       return reporter;
     }
 
-    private long getInputBytes(Statistics stats) {
-      return stats == null ? 0 : stats.getBytesRead();
+    private long getInputBytes(List<Statistics> stats) {
+      if (stats == null) return 0;
+      long bytesRead = 0;
+      for (Statistics stat: stats) {
+        bytesRead = bytesRead + stat.getBytesRead();
+      }
+      return bytesRead;
     }
   }
 
@@ -427,7 +431,7 @@ class MapTask extends Task {
     private final org.apache.hadoop.mapreduce.Counter inputRecordCounter;
     private final org.apache.hadoop.mapreduce.Counter fileInputByteCounter;
     private final TaskReporter reporter;
-    private final Statistics fsStats;
+    private final List<Statistics> fsStats;
     
     NewTrackingRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
         org.apache.hadoop.mapreduce.InputFormat<K, V> inputFormat,
@@ -440,7 +444,7 @@ class MapTask extends Task {
       this.fileInputByteCounter = reporter
           .getCounter(FileInputFormatCounter.BYTES_READ);
 
-      Statistics matchedStats = null;
+      List <Statistics> matchedStats = null;
       if (split instanceof org.apache.hadoop.mapreduce.lib.input.FileSplit) {
         matchedStats = getFsStatistics(((org.apache.hadoop.mapreduce.lib.input.FileSplit) split)
             .getPath(), taskContext.getConfiguration());
@@ -499,8 +503,13 @@ class MapTask extends Task {
       return result;
     }
 
-    private long getInputBytes(Statistics stats) {
-      return stats == null ? 0 : stats.getBytesRead();
+    private long getInputBytes(List<Statistics> stats) {
+      if (stats == null) return 0;
+      long bytesRead = 0;
+      for (Statistics stat: stats) {
+        bytesRead = bytesRead + stat.getBytesRead();
+      }
+      return bytesRead;
     }
   }
 
@@ -555,7 +564,7 @@ class MapTask extends Task {
 
     private final Counters.Counter mapOutputRecordCounter;
     private final Counters.Counter fileOutputByteCounter; 
-    private final Statistics fsStats;
+    private final List<Statistics> fsStats;
     
     @SuppressWarnings("unchecked")
     NewDirectOutputCollector(MRJobConfig jobContext,
@@ -567,7 +576,7 @@ class MapTask extends Task {
       fileOutputByteCounter = reporter
           .getCounter(FileOutputFormatCounter.BYTES_WRITTEN);
 
-      Statistics matchedStats = null;
+      List<Statistics> matchedStats = null;
       if (outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) {
         matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
             .getOutputPath(taskContext), taskContext.getConfiguration());
@@ -604,8 +613,13 @@ class MapTask extends Task {
       }
     }
     
-    private long getOutputBytes(Statistics stats) {
-      return stats == null ? 0 : stats.getBytesWritten();
+    private long getOutputBytes(List<Statistics> stats) {
+      if (stats == null) return 0;
+      long bytesWritten = 0;
+      for (Statistics stat: stats) {
+        bytesWritten = bytesWritten + stat.getBytesWritten();
+      }
+      return bytesWritten;
     }
   }
   
@@ -736,7 +750,7 @@ class MapTask extends Task {
 
     private final Counters.Counter mapOutputRecordCounter;
     private final Counters.Counter fileOutputByteCounter;
-    private final Statistics fsStats;
+    private final List<Statistics> fsStats;
 
     @SuppressWarnings("unchecked")
     public DirectMapOutputCollector(TaskUmbilicalProtocol umbilical,
@@ -751,7 +765,7 @@ class MapTask extends Task {
       fileOutputByteCounter = reporter
           .getCounter(FileOutputFormatCounter.BYTES_WRITTEN);
 
-      Statistics matchedStats = null;
+      List<Statistics> matchedStats = null;
       if (outputFormat instanceof FileOutputFormat) {
         matchedStats = getFsStatistics(FileOutputFormat.getOutputPath(job), job);
       }
@@ -786,8 +800,13 @@ class MapTask extends Task {
       mapOutputRecordCounter.increment(1);
     }
 
-    private long getOutputBytes(Statistics stats) {
-      return stats == null ? 0 : stats.getBytesWritten();
+    private long getOutputBytes(List<Statistics> stats) {
+      if (stats == null) return 0;
+      long bytesWritten = 0;
+      for (Statistics stat: stats) {
+        bytesWritten = bytesWritten + stat.getBytesWritten();
+      }
+      return bytesWritten;
     }
   }
 

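The Statistics-to-List<Statistics> change above accounts for FileSystem keeping a separate Statistics object per filesystem class under the same scheme, so a single input or output path can match several entries and the byte counts must be summed across all of them, as the new getInputBytes/getOutputBytes loops do. The getFsStatistics helper called in these hunks presumably gathers that list roughly like this sketch (the class and method names here are illustrative, not the patch's actual code):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileSystem.Statistics;
    import org.apache.hadoop.fs.Path;

    public class FsStatsLookup {
      // Collects every registered Statistics entry whose scheme matches the
      // path's filesystem; callers then sum bytes across the whole list.
      public static List<Statistics> statisticsForPath(Path path,
          Configuration conf) throws IOException {
        String scheme = path.getFileSystem(conf).getUri().getScheme();
        List<Statistics> matched = new ArrayList<Statistics>();
        for (Statistics stats : FileSystem.getAllStatistics()) {
          if (stats.getScheme().equals(scheme)) {
            matched.add(stats);
          }
        }
        return matched;
      }
    }
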
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Mapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Mapper.java?rev=1242635&r1=1242634&r2=1242635&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Mapper.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Mapper.java Fri Feb 10 01:49:08 2012
@@ -129,9 +129,7 @@ import org.apache.hadoop.io.compress.Com
  * @see MapReduceBase
  * @see MapRunnable
  * @see SequenceFile
- * @deprecated Use {@link org.apache.hadoop.mapreduce.Mapper} instead.
  */
-@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public interface Mapper<K1, V1, K2, V2> extends JobConfigurable, Closeable {