Posted to mapreduce-commits@hadoop.apache.org by om...@apache.org on 2009/10/27 16:44:06 UTC

svn commit: r830230 [5/9] - in /hadoop/mapreduce/branches/HDFS-641: ./ .eclipse.templates/ conf/ ivy/ lib/ src/c++/ src/contrib/ src/contrib/capacity-scheduler/ src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/ src/contrib/capacity-sche...

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/AllTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/AllTests.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/AllTests.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/AllTests.java Tue Oct 27 15:43:58 2009
@@ -18,15 +18,7 @@
 
 package org.apache.hadoop.sqoop;
 
-import org.apache.hadoop.sqoop.hive.TestHiveImport;
-import org.apache.hadoop.sqoop.lib.TestFieldFormatter;
-import org.apache.hadoop.sqoop.lib.TestRecordParser;
-import org.apache.hadoop.sqoop.manager.TestHsqldbManager;
-import org.apache.hadoop.sqoop.manager.TestSqlManager;
 import org.apache.hadoop.sqoop.mapred.MapredTests;
-import org.apache.hadoop.sqoop.mapreduce.MapreduceTests;
-import org.apache.hadoop.sqoop.orm.TestClassWriter;
-import org.apache.hadoop.sqoop.orm.TestParseMethods;
 
 import junit.framework.Test;
 import junit.framework.TestSuite;
@@ -39,26 +31,11 @@
   private AllTests() { }
 
   public static Test suite() {
-    TestSuite suite = new TestSuite("Tests for org.apache.hadoop.sqoop");
+    TestSuite suite = new TestSuite("All tests for org.apache.hadoop.sqoop");
 
-    suite.addTestSuite(TestAllTables.class);
-    suite.addTestSuite(TestHsqldbManager.class);
-    suite.addTestSuite(TestSqlManager.class);
-    suite.addTestSuite(TestClassWriter.class);
-    suite.addTestSuite(TestColumnTypes.class);
-    suite.addTestSuite(TestMultiCols.class);
-    suite.addTestSuite(TestMultiMaps.class);
-    suite.addTestSuite(TestSplitBy.class);
-    suite.addTestSuite(TestWhere.class);
-    suite.addTestSuite(TestHiveImport.class);
-    suite.addTestSuite(TestRecordParser.class);
-    suite.addTestSuite(TestFieldFormatter.class);
-    suite.addTestSuite(TestImportOptions.class);
-    suite.addTestSuite(TestParseMethods.class);
-    suite.addTestSuite(TestConnFactory.class);
+    suite.addTest(SmokeTests.suite());
     suite.addTest(ThirdPartyTests.suite());
     suite.addTest(MapredTests.suite());
-    suite.addTest(MapreduceTests.suite());
 
     return suite;
   }
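
The hunk above collapses the fifteen individually registered test classes into a single call to SmokeTests.suite(). The SmokeTests class itself is not part of this portion of the diff, so the following is only a sketch of how such an aggregate is plausibly composed; the suite name and the re-registered classes are assumptions, not confirmed by this hunk:

    package org.apache.hadoop.sqoop;

    import junit.framework.Test;
    import junit.framework.TestSuite;

    // Hypothetical sketch; the real SmokeTests.java lives elsewhere in
    // this commit and may differ.
    public final class SmokeTests {

      private SmokeTests() { }

      public static Test suite() {
        TestSuite suite =
            new TestSuite("Smoke tests for org.apache.hadoop.sqoop");
        // The classes dropped from AllTests above would be re-registered
        // here; two same-package examples:
        suite.addTestSuite(TestAllTables.class);
        suite.addTestSuite(TestConnFactory.class);
        return suite;
      }
    }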

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java Tue Oct 27 15:43:58 2009
@@ -31,6 +31,7 @@
 import org.apache.hadoop.io.IOUtils;
 import org.junit.Before;
 
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 
@@ -47,12 +48,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("mapreduce.jobtracker.address=local");
-      args.add("-D");
-      args.add("mapreduce.job.maps=1");
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--all-tables");
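
This hunk, and the matching ones in TestSplitBy, TestWhere, TestHiveImport, TestMultiMaps, TestParseMethods, MySQLAuthTest, OracleManagerTest and ImportJobTestCase further down, replace hand-rolled "-D" argument lists with one shared helper. The new testutil class is not shown in this part of the diff, but the deleted lines pin down what it has to add; a minimal sketch under that assumption:

    package org.apache.hadoop.sqoop.testutil;

    import java.util.List;

    // Sketch inferred from the flags deleted in the hunks above and
    // below; the real CommonArgs.java is elsewhere in this commit.
    public final class CommonArgs {

      private CommonArgs() { }

      /** Adds the -D options that force a local, single-map test run. */
      public static void addHadoopFlags(List<String> args) {
        args.add("-D");
        args.add("mapreduce.jobtracker.address=local");
        args.add("-D");
        args.add("mapreduce.job.maps=1");
        args.add("-D");
        args.add("fs.default.name=file:///");
      }
    }

One visible side effect of the consolidation: tests that still passed the deprecated keys (mapred.job.tracker in TestMultiMaps, mapred.map.tasks in TestSplitBy) now go through the current property names as well.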

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java Tue Oct 27 15:43:58 2009
@@ -184,4 +184,45 @@
     assertEquals('*', opts.getInputFieldDelim());
     assertEquals('|', opts.getOutputFieldDelim());
   }
+
+  public void testBadNumMappers1() {
+    String [] args = {
+      "--num-mappers",
+      "x"
+    };
+
+    try {
+      ImportOptions opts = new ImportOptions();
+      opts.parse(args);
+      fail("Expected InvalidOptionsException");
+    } catch (ImportOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testBadNumMappers2() {
+    String [] args = {
+      "-m",
+      "x"
+    };
+
+    try {
+      ImportOptions opts = new ImportOptions();
+      opts.parse(args);
+      fail("Expected InvalidOptionsException");
+    } catch (ImportOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
+  public void testGoodNumMappers() throws ImportOptions.InvalidOptionsException {
+    String [] args = {
+      "-m",
+      "4"
+    };
+
+    ImportOptions opts = new ImportOptions();
+    opts.parse(args);
+    assertEquals(4, opts.getNumMappers());
+  }
 }

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java Tue Oct 27 15:43:58 2009
@@ -28,10 +28,12 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.testutil.SeqFileReader;
@@ -55,10 +57,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("mapred.job.tracker=local");
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--table");
@@ -90,7 +89,9 @@
     conf.set("fs.default.name", "file:///");
     FileSystem fs = FileSystem.get(conf);
 
-    FileStatus [] stats = fs.listStatus(getTablePath());
+    FileStatus [] stats = fs.listStatus(getTablePath(),
+        new Utils.OutputFileUtils.OutputFilesFilter());
+
     for (FileStatus stat : stats) {
       paths.add(stat.getPath());
     }
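
The added filter matters because, later in this same commit, FileOutputCommitter.commitJob starts dropping a _SUCCESS marker into every successful job's output directory (see the FileOutputCommitter.java hunk below); a bare listStatus would hand that marker, along with the _logs side directory, to the SequenceFile reader. A self-contained usage sketch, assuming OutputFilesFilter skips such bookkeeping files, which is consistent with every use of it in this diff:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.Utils;

    public class ListJobOutputs {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "file:///");
        FileSystem fs = FileSystem.get(conf);
        // Hypothetical output directory; only real part-* files survive
        // the filter, side files such as _logs and _SUCCESS do not.
        FileStatus[] stats = fs.listStatus(new Path("/tmp/job-output"),
            new Utils.OutputFileUtils.OutputFilesFilter());
        for (FileStatus stat : stats) {
          System.out.println(stat.getPath());
        }
      }
    }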

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java Tue Oct 27 15:43:58 2009
@@ -28,6 +28,7 @@
 
 import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.testutil.SeqFileReader;
@@ -51,12 +52,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("mapred.job.tracker=local");
-      args.add("-D");
-      args.add("mapred.map.tasks=1");
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--table");

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java Tue Oct 27 15:43:58 2009
@@ -28,6 +28,7 @@
 
 import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.testutil.SeqFileReader;
@@ -54,12 +55,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("mapreduce.jobtracker.address=local");
-      args.add("-D");
-      args.add("mapreduce.job.maps=1");
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--table");

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java Tue Oct 27 15:43:58 2009
@@ -28,6 +28,7 @@
 import org.apache.hadoop.fs.Path;
 
 import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 
@@ -46,12 +47,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("mapreduce.jobtracker.address=local");
-      args.add("-D");
-      args.add("mapreduce.job.maps=1");
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--table");

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java Tue Oct 27 15:43:58 2009
@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 
 /**
@@ -134,8 +135,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--table");

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java Tue Oct 27 15:43:58 2009
@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.util.FileListing;
 
@@ -141,10 +142,7 @@
   private String [] getArgv() {
     ArrayList<String> args = new ArrayList<String>();
 
-    args.add("-D");
-    args.add("fs.default.name=file:///");
-    args.add("-D");
-    args.add("mapreduce.jobtracker.address=local");
+    CommonArgs.addHadoopFlags(args);
 
     args.add("--table");
     args.add(TABLE_NAME);

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java Tue Oct 27 15:43:58 2009
@@ -38,6 +38,7 @@
 import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
 import org.apache.hadoop.sqoop.mapred.RawKeyTextOutputFormat;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
+import org.apache.hadoop.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
 import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.sqoop.testutil.ReparseMapper;
@@ -59,12 +60,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("mapreduce.jobtracker.address=local");
-      args.add("-D");
-      args.add("mapreduce.job.maps=1");
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--table");

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java Tue Oct 27 15:43:58 2009
@@ -286,12 +286,7 @@
     ArrayList<String> args = new ArrayList<String>();
 
     if (includeHadoopFlags) {
-      args.add("-D");
-      args.add("mapreduce.jobtracker.address=local");
-      args.add("-D");
-      args.add("mapreduce.job.maps=1");
-      args.add("-D");
-      args.add("fs.default.name=file:///");
+      CommonArgs.addHadoopFlags(args);
     }
 
     args.add("--table");

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 27 15:43:58 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/streaming:713112
 /hadoop/core/trunk/src/contrib/streaming:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/streaming:817879-818559
+/hadoop/mapreduce/trunk/src/contrib/streaming:817878-830225

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java Tue Oct 27 15:43:58 2009
@@ -34,7 +34,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 /**
  * This test case tests the symlink creation
@@ -123,7 +123,8 @@
         String line2 = null;
         Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
                                      new Path(OUTPUT_DIR),
-                                     new OutputLogFilter()));
+                                     new Utils.OutputFileUtils
+                                              .OutputFilesFilter()));
         for (int i = 0; i < fileList.length; i++){
           System.out.println(fileList[i].toString());
           BufferedReader bread =

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java Tue Oct 27 15:43:58 2009
@@ -24,6 +24,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
 
 import junit.framework.TestCase;
 
@@ -65,7 +66,7 @@
   public void testCommandLine() throws Exception {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -86,10 +87,8 @@
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
       INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 }
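
The same substitution recurs through the remaining streaming tests below, and the deleted lines show why: java.io.File.delete() refuses to remove a non-empty directory, so each test previously had to delete the hidden .part-00000.crc checksum by hand before deleting OUTPUT_DIR. FileUtil.fullyDelete removes the whole tree in one call, which also covers the _SUCCESS marker introduced elsewhere in this commit; it declares IOException, hence the new try/catch blocks in the later hunks. A minimal sketch of the pattern, with a placeholder path:

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.fs.FileUtil;

    public class CleanupSketch {
      public static void main(String[] args) throws IOException {
        File outputDir = new File("/tmp/stream-out").getAbsoluteFile();
        // Recursively deletes the directory and everything in it
        // (part files, .crc checksums, _SUCCESS); no manual per-file
        // cleanup required.
        FileUtil.fullyDelete(outputDir);
      }
    }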

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java Tue Oct 27 15:43:58 2009
@@ -21,6 +21,7 @@
 import junit.framework.TestCase;
 import java.io.*;
 
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapreduce.JobContext;
 
 /**
@@ -72,7 +73,7 @@
   {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -92,10 +93,12 @@
     } catch(Exception e) {
       failTrace(e);
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        failTrace(e);
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java Tue Oct 27 15:43:58 2009
@@ -23,6 +23,7 @@
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 
@@ -82,7 +83,7 @@
   {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -103,10 +104,12 @@
     } catch(Exception e) {
       failTrace(e);
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        failTrace(e);
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java Tue Oct 27 15:43:58 2009
@@ -23,6 +23,7 @@
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 
 /**
@@ -75,7 +76,7 @@
     File outFile = null;
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -94,11 +95,12 @@
     } catch(Exception e) {
       failTrace(e);
     } finally {
-      outFile.delete();
-      File outFileCRC = new File(OUTPUT_DIR, "."+outFileName+".crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        failTrace(e);
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java Tue Oct 27 15:43:58 2009
@@ -22,6 +22,8 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.fs.FileUtil;
+
 /**
  * This class tests StreamXmlRecordReader
  * The test creates an XML file, uses StreamXmlRecordReader and compares
@@ -61,7 +63,7 @@
   public void testCommandLine() {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
       createInput();
@@ -74,10 +76,12 @@
     } catch (Exception e) {
       e.printStackTrace();
     } finally {
-      INPUT_FILE.delete();
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java Tue Oct 27 15:43:58 2009
@@ -21,6 +21,8 @@
 import junit.framework.TestCase;
 import java.io.*;
 
+import org.apache.hadoop.fs.FileUtil;
+
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  */
@@ -73,7 +75,7 @@
   {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -91,10 +93,12 @@
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java Tue Oct 27 15:43:58 2009
@@ -38,9 +38,9 @@
 import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputLogFilter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 public class TestStreamingBadRecords extends ClusterMapReduceTestCase
@@ -126,7 +126,7 @@
     badRecs.addAll(REDUCER_BAD_RECORDS);
     Path[] outputFiles = FileUtil.stat2Paths(
         getFileSystem().listStatus(getOutputDir(),
-        new OutputLogFilter()));
+        new Utils.OutputFileUtils.OutputFilesFilter()));
     
     if (outputFiles.length > 0) {
       InputStream is = getFileSystem().open(outputFiles[0]);

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java Tue Oct 27 15:43:58 2009
@@ -21,6 +21,7 @@
 import java.io.File;
 import java.io.IOException;
 
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.Counters.Group;
@@ -38,7 +39,7 @@
   {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -62,10 +63,12 @@
       assertNotNull("Counter", counter);
       assertEquals(3, counter.getCounter());
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
     }
   }
   

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java Tue Oct 27 15:43:58 2009
@@ -21,6 +21,8 @@
 import junit.framework.TestCase;
 import java.io.*;
 
+import org.apache.hadoop.fs.FileUtil;
+
 /**
  * This class tests hadoopStreaming in MapReduce local mode by giving
  * empty input to mapper and the mapper generates nonempty output. Since map()
@@ -100,11 +102,13 @@
       outFile = new File(OUTPUT_DIR, "part-00000").getAbsoluteFile();
       outFile.delete();
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      SCRIPT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        SCRIPT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java Tue Oct 27 15:43:58 2009
@@ -75,7 +75,11 @@
     } catch(Exception e) {
       // Expecting an exception
     } finally {
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java Tue Oct 27 15:43:58 2009
@@ -21,6 +21,8 @@
 import junit.framework.TestCase;
 import java.io.*;
 
+import org.apache.hadoop.fs.FileUtil;
+
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  * This testcase looks at different cases of tab position in input. 
@@ -78,7 +80,7 @@
     File outFile = null;
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -97,12 +99,12 @@
     } catch(Exception e) {
       failTrace(e);
     } finally {
-      outFile.delete();
-      File outFileCRC = new File(OUTPUT_DIR,
-                          "." + outFileName + ".crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        failTrace(e);
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java Tue Oct 27 15:43:58 2009
@@ -23,6 +23,7 @@
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 
 /**
@@ -89,7 +90,7 @@
   {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -109,10 +110,12 @@
     } catch(Exception e) {
       failTrace(e);
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
-      INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      try {
+        INPUT_FILE.delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+      } catch (IOException e) {
+        failTrace(e);
+      }
     }
   }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java Tue Oct 27 15:43:58 2009
@@ -34,7 +34,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 /**
  * This test case tests the symlink creation
@@ -113,7 +113,8 @@
         String line = null;
         Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
                                                 new Path(OUTPUT_DIR),
-                                                new OutputLogFilter()));
+                                                new Utils.OutputFileUtils
+                                                         .OutputFilesFilter()));
         for (int i = 0; i < fileList.length; i++){
           System.out.println(fileList[i].toString());
           BufferedReader bread =

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java Tue Oct 27 15:43:58 2009
@@ -24,6 +24,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
 
 import junit.framework.TestCase;
 
@@ -63,7 +64,7 @@
   public void testCommandLine() throws Exception {
     try {
       try {
-        OUTPUT_DIR.getAbsoluteFile().delete();
+        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
       } catch (Exception e) {
       }
 
@@ -84,10 +85,8 @@
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
     } finally {
-      File outFileCRC = new File(OUTPUT_DIR, ".part-00000.crc").getAbsoluteFile();
       INPUT_FILE.delete();
-      outFileCRC.delete();
-      OUTPUT_DIR.getAbsoluteFile().delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 }

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/vaidya/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 27 15:43:58 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/vaidya:713112
 /hadoop/core/trunk/src/contrib/vaidya:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/vaidya:817879-818559
+/hadoop/mapreduce/trunk/src/contrib/vaidya:817878-830225

Modified: hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml Tue Oct 27 15:43:58 2009
@@ -204,38 +204,6 @@
           	users, no user can use more than 25% of the queue's resources. A 
           	value of 100 implies no user limits are imposed.</td>
           </tr>
-          <tr><td>mapred.capacity-scheduler.queue.&lt;queue-<br/>name&gt;.max.map.slots</td>
-          	<td>
-		    This value is the maximum max slots that can be used in a
-		    queue at any point of time. So for example assuming above config value
-		    is 100 , not more than 100 tasks would be in the queue at any point of
-		    time, assuming each task takes one slot.
-
-		    Default value of -1 would disable this capping feature
-
-		    Typically the queue capacity should be equal to this limit.
-		    If queue capacity is more than this limit, excess capacity will be
-		    used by the other queues. If queue capacity is less than the above
-		    limit , then the limit would be the queue capacity - as in the current
-		    implementation
-                </td>
-          </tr>
-          <tr><td>mapred.capacity-scheduler.queue.&lt;queue-<br/>name&gt;.max.reduce.slots</td>
-          	<td>
-		    This value is the maximum reduce slots that can be used in a
-		    queue at any point of time. So for example assuming above config value
-		    is 100 , not more than 100 tasks would be in the queue at any point of
-		    time, assuming each task takes one slot.
-
-		    Default value of -1 would disable this capping feature
-
-		    Typically the queue capacity should be equal to this limit.
-		    If queue capacity is more than this limit, excess capacity will be
-		    used by the other queues. If queue capacity is less than the above
-		    limit , then the limit would be the queue capacity - as in the current
-		    implementation
-                </td>
-          </tr>
         </table>
       </section>
       

Modified: hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/site.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/site.xml?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/site.xml (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/site.xml Tue Oct 27 15:43:58 2009
@@ -43,6 +43,7 @@
 		<distcp    					label="DistCp"       href="distcp.html" />
 		<vaidya    					label="Vaidya" 		href="vaidya.html"/>
 		<archives  				label="Hadoop Archives"     href="hadoop_archives.html"/>
+		<gridmix  				label="Gridmix"     href="gridmix.html"/>
    </docs>
    
     <docs label="Schedulers">

Propchange: hadoop/mapreduce/branches/HDFS-641/src/examples/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 27 15:43:58 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/examples:713112
 /hadoop/core/trunk/src/examples:776175-784663
-/hadoop/mapreduce/trunk/src/examples:817879-818559
+/hadoop/mapreduce/trunk/src/examples:817878-830225

Modified: hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Pentomino.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Pentomino.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Pentomino.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Pentomino.java Tue Oct 27 15:43:58 2009
@@ -60,7 +60,7 @@
       }
       this.shape = new boolean[lines.size()][];
       for(int i=0 ; i < lines.size(); i++) {
-        this.shape[i] = (boolean[]) lines.get(i);
+        this.shape[i] = lines.get(i);
       }
     }
     
@@ -383,7 +383,7 @@
     }
     boolean[] row = new boolean[dancer.getNumberColumns()];
     for(int idx = 0; idx < pieces.size(); ++idx) {
-      Piece piece = (Piece) pieces.get(idx);
+      Piece piece = pieces.get(idx);
       row[idx + pieceBase] = true;
       generateRows(dancer, piece, width, height, false, row, idx == 0);
       if (piece.getFlippable()) {

Modified: hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Sudoku.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Sudoku.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Sudoku.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/dancing/Sudoku.java Tue Oct 27 15:43:58 2009
@@ -154,7 +154,7 @@
       line = file.readLine();
     }
     size = result.size();
-    board = (int[][]) result.toArray(new int [size][]);
+    board = result.toArray(new int [size][]);
     squareYSize = (int) Math.sqrt(size);
     squareXSize = size / squareYSize;
     file.close();
@@ -236,8 +236,8 @@
       rowValues[i] = false;
     }
     // find the square coordinates
-    int xBox = (int) x / squareXSize;
-    int yBox = (int) y / squareYSize;
+    int xBox = x / squareXSize;
+    int yBox = y / squareYSize;
     // mark the column
     rowValues[x*size + num - 1] = true;
     // mark the row

Modified: hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/pi/DistSum.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/pi/DistSum.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/pi/DistSum.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/pi/DistSum.java Tue Oct 27 15:43:58 2009
@@ -38,9 +38,9 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobTracker;
+import org.apache.hadoop.mapreduce.Cluster;
+import org.apache.hadoop.mapreduce.ClusterMetrics;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -379,14 +379,14 @@
   public static class MixMachine extends Machine {
     private static final MixMachine INSTANCE = new MixMachine();
     
-    private JobClient jobclient;
+    private Cluster cluster;
 
     /** {@inheritDoc} */
     @Override
     public synchronized void init(Job job) throws IOException {
       final Configuration conf = job.getConfiguration();
-      if (jobclient == null)
-        jobclient = new JobClient(JobTracker.getAddress(conf), conf);
+      if (cluster == null)
+        cluster = new Cluster(JobTracker.getAddress(conf), conf);
       chooseMachine(conf).init(job);
     }
 
@@ -398,9 +398,11 @@
       try {
         for(;; Thread.sleep(2000)) {
           //get cluster status
-          final ClusterStatus status = jobclient.getClusterStatus();
-          final int m = status.getMaxMapTasks() - status.getMapTasks();
-          final int r = status.getMaxReduceTasks() - status.getReduceTasks();
+          final ClusterMetrics status = cluster.getClusterStatus();
+          final int m = 
+            status.getMapSlotCapacity() - status.getOccupiedMapSlots();
+          final int r = 
+            status.getReduceSlotCapacity() - status.getOccupiedReduceSlots();
           if (m >= parts || r >= parts) {
             //favor ReduceSide machine
             final Machine value = r >= parts?
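
The DistSum hunk above ports the slot-headroom probe from the JobClient/ClusterStatus pair to the new Cluster/ClusterMetrics API (ClusterStatus is formally deprecated in a hunk below). A condensed sketch of the new idiom, restricted to calls that appear in this diff:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobTracker;
    import org.apache.hadoop.mapreduce.Cluster;
    import org.apache.hadoop.mapreduce.ClusterMetrics;

    public class Headroom {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumes a reachable JobTracker configured in conf.
        Cluster cluster = new Cluster(JobTracker.getAddress(conf), conf);
        ClusterMetrics metrics = cluster.getClusterStatus();
        // Free slots = total slot capacity minus occupied slots.
        int freeMaps =
            metrics.getMapSlotCapacity() - metrics.getOccupiedMapSlots();
        int freeReduces =
            metrics.getReduceSlotCapacity() - metrics.getOccupiedReduceSlots();
        System.out.println(freeMaps + " map / " + freeReduces
            + " reduce slots free");
      }
    }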

Modified: hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/terasort/TeraOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/terasort/TeraOutputFormat.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/terasort/TeraOutputFormat.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/examples/org/apache/hadoop/examples/terasort/TeraOutputFormat.java Tue Oct 27 15:43:58 2009
@@ -106,7 +106,7 @@
   public static class TeraOutputCommitter extends FileOutputCommitter {
 
     @Override
-    public void cleanupJob(JobContext jobContext) {
+    public void commitJob(JobContext jobContext) {
     }
 
     @Override

Propchange: hadoop/mapreduce/branches/HDFS-641/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Oct 27 15:43:58 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/java:713112
 /hadoop/core/trunk/src/mapred:776175-785643
-/hadoop/mapreduce/trunk/src/java:817879-818559
+/hadoop/mapreduce/trunk/src/java:817878-830225

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/mapred-default.xml?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/mapred-default.xml (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/mapred-default.xml Tue Oct 27 15:43:58 2009
@@ -333,6 +333,14 @@
 </property>
 
 <property>
+  <name>mapreduce.tasktracker.outofband.heartbeat</name>
+  <value>false</value>
+  <description>Expert: Set this to true to let the tasktracker send an 
+  out-of-band heartbeat on task-completion for better latency.
+  </description>
+</property>
+
+<property>
   <name>mapreduce.jobtracker.jobhistory.lru.cache.size</name>
   <value>5</value>
   <description>The number of job history files loaded in memory. The jobs are 
@@ -665,7 +673,7 @@
   <name>mapreduce.jobtracker.heartbeats.in.second</name>
   <value>100</value>
   <description>Expert: Approximate number of heart-beats that could arrive 
-               JobTracker in a second. Assuming each RPC can be processed 
+               at JobTracker in a second. Assuming each RPC can be processed 
                in 10msec, the default value is made 100 RPCs in a second.
   </description>
 </property> 
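
A small example of flipping the new expert switch from job-submission code; the key and its default are taken straight from the property block above:

    import org.apache.hadoop.conf.Configuration;

    public class EnableOobHeartbeat {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Default is false; true lets the tasktracker send an
        // out-of-band heartbeat on task completion for lower latency.
        conf.setBoolean("mapreduce.tasktracker.outofband.heartbeat", true);
      }
    }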

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/ClusterStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/ClusterStatus.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/ClusterStatus.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/ClusterStatus.java Tue Oct 27 15:43:58 2009
@@ -27,6 +27,8 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.mapreduce.ClusterMetrics;
+import org.apache.hadoop.mapreduce.TaskTrackerInfo;
 
 /**
  * Status information on the current state of the Map-Reduce cluster.
@@ -57,7 +59,9 @@
  * {@link JobClient#getClusterStatus()}.</p>
  * 
  * @see JobClient
+ * @deprecated  Use {@link ClusterMetrics} or {@link TaskTrackerInfo} instead
  */
+@Deprecated
 public class ClusterStatus implements Writable {
   /**
    * Class which encapsulates information about a blacklisted tasktracker.

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/EagerTaskInitializationListener.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/EagerTaskInitializationListener.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/EagerTaskInitializationListener.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/EagerTaskInitializationListener.java Tue Oct 27 15:43:58 2009
@@ -85,7 +85,7 @@
   private List<JobInProgress> jobInitQueue = new ArrayList<JobInProgress>();
   private ExecutorService threadPool;
   private int numThreads;
-  private TaskTrackerManager ttm;
+  TaskTrackerManager ttm;
   
   public EagerTaskInitializationListener(Configuration conf) {
     numThreads = 

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java Tue Oct 27 15:43:58 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.util.StringUtils;
 
 /** An {@link OutputCommitter} that commits files specified 
@@ -39,6 +40,9 @@
    * Temporary directory name 
    */
   public static final String TEMP_DIR_NAME = "_temporary";
+  public static final String SUCCEEDED_FILE_NAME = "_SUCCESS";
+  static final String SUCCESSFUL_JOB_OUTPUT_DIR_MARKER = 
+    "mapreduce.fileoutputcommitter.marksuccessfuljobs";
 
   public void setupJob(JobContext context) throws IOException {
     JobConf conf = context.getJobConf();
@@ -52,7 +56,38 @@
     }
   }
 
-  public void cleanupJob(JobContext context) throws IOException {
+  // True if the job requires output.dir marked on successful job.
+  // Note that by default it is set to true.
+  private boolean shouldMarkOutputDir(JobConf conf) {
+    return conf.getBoolean(SUCCESSFUL_JOB_OUTPUT_DIR_MARKER, true);
+  }
+  
+  public void commitJob(JobContext context) throws IOException {
+    // delete the _temporary folder in the output folder
+    cleanup(context);
+    // check if the output-dir marking is required
+    if (shouldMarkOutputDir(context.getJobConf())) {
+      // create a _success file in the output folder
+      markOutputDirSuccessful(context);
+    }
+  }
+  
+  // Create a _success file in the job's output folder
+  private void markOutputDirSuccessful(JobContext context) throws IOException {
+    JobConf conf = context.getJobConf();
+    // get the o/p path
+    Path outputPath = FileOutputFormat.getOutputPath(conf);
+    if (outputPath != null) {
+      // get the filesys
+      FileSystem fileSys = outputPath.getFileSystem(conf);
+      // create a file in the output folder to mark the job completion
+      Path filePath = new Path(outputPath, SUCCEEDED_FILE_NAME);
+      fileSys.create(filePath).close();
+    }
+  }
+  
+  // Deletes the _temporary folder in the job's output dir.
+  private void cleanup(JobContext context) throws IOException {
     JobConf conf = context.getJobConf();
     // do the clean up of temporary directory
     Path outputPath = FileOutputFormat.getOutputPath(conf);
@@ -62,10 +97,19 @@
       context.getProgressible().progress();
       if (fileSys.exists(tmpDir)) {
         fileSys.delete(tmpDir, true);
+      } else {
+        LOG.warn("Temporary directory " + tmpDir + " does not exist in cleanup");
       }
     }
   }
 
+  @Override
+  public void abortJob(JobContext context, int runState) 
+  throws IOException {
+    // simply delete the _temporary dir from the job's output folder
+    cleanup(context);
+  }
+  
   public void setupTask(TaskAttemptContext context) throws IOException {
     // FileOutputCommitter's setupTask doesn't do anything. Because the
     // temporary task directory is created on demand when the 

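A note on the new marker: SUCCESSFUL_JOB_OUTPUT_DIR_MARKER defaults to
true, so every successful job now drops an empty _SUCCESS file into its
output directory. A minimal sketch of opting out for a single job,
assuming a JobConf built before submission (only the key name and its
default come from the hunk above; the rest is illustrative):

    JobConf conf = new JobConf();
    // Suppress creation of the _SUCCESS marker file for this job.
    conf.setBoolean("mapreduce.fileoutputcommitter.marksuccessfuljobs",
                    false);
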
Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobConf.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobConf.java Tue Oct 27 15:43:58 2009
@@ -42,8 +42,6 @@
 import org.apache.hadoop.mapred.lib.KeyFieldBasedComparator;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
@@ -116,7 +114,8 @@
   }
 
   /**
-   * @deprecated
+   * @deprecated Use {@link #MAPRED_JOB_MAP_MEMORY_MB_PROPERTY} and
+   * {@link #MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY}
    */
   @Deprecated
   public static final String MAPRED_TASK_MAXVMEM_PROPERTY =
@@ -1608,32 +1607,68 @@
     return get(JobContext.JOB_LOCAL_DIR);
   }
 
+  /**
+   * Get memory required to run a map task of the job, in MB.
+   * 
+   * If a value is specified in the configuration, it is returned.
+   * Else, it returns {@link #DISABLED_MEMORY_LIMIT}.
+   * <p/>
+   * For backward compatibility, if the job configuration sets the
+   * key {@link #MAPRED_TASK_MAXVMEM_PROPERTY} to a value different
+   * from {@link #DISABLED_MEMORY_LIMIT}, that value will be used
+   * after converting it from bytes to MB.
+   * @return memory required to run a map task of the job, in MB,
+   *          or {@link #DISABLED_MEMORY_LIMIT} if unset.
+   */
   public long getMemoryForMapTask() {
-    if (get(MAPRED_TASK_MAXVMEM_PROPERTY) != null) {
-      long val = getLong(
-        MAPRED_TASK_MAXVMEM_PROPERTY, DISABLED_MEMORY_LIMIT);
-      return (val == DISABLED_MEMORY_LIMIT) ? val :
-        ((val < 0) ? DISABLED_MEMORY_LIMIT : val / (1024 * 1024));
+    long value = getDeprecatedMemoryValue();
+    if (value == DISABLED_MEMORY_LIMIT) {
+      value = normalizeMemoryConfigValue(
+                getLong(JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY,
+                          DISABLED_MEMORY_LIMIT));
     }
-    return getLong(
-      JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY,
-      DISABLED_MEMORY_LIMIT);
+    return value;
   }
 
   public void setMemoryForMapTask(long mem) {
     setLong(JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY, mem);
   }
 
+  /**
+   * Get memory required to run a reduce task of the job, in MB.
+   * 
+   * If a value is specified in the configuration, it is returned.
+   * Else, it returns {@link #DISABLED_MEMORY_LIMIT}.
+   * <p/>
+   * For backward compatibility, if the job configuration sets the
+   * key {@link #MAPRED_TASK_MAXVMEM_PROPERTY} to a value different
+   * from {@link #DISABLED_MEMORY_LIMIT}, that value will be used
+   * after converting it from bytes to MB.
+   * @return memory required to run a reduce task of the job, in MB,
+   *          or {@link #DISABLED_MEMORY_LIMIT} if unset.
+   */
   public long getMemoryForReduceTask() {
-    if (get(MAPRED_TASK_MAXVMEM_PROPERTY) != null) {
-      long val = getLong(
-        MAPRED_TASK_MAXVMEM_PROPERTY, DISABLED_MEMORY_LIMIT);
-      return (val == DISABLED_MEMORY_LIMIT) ? val :
-        ((val < 0) ? DISABLED_MEMORY_LIMIT : val / (1024 * 1024));
+    long value = getDeprecatedMemoryValue();
+    if (value == DISABLED_MEMORY_LIMIT) {
+      value = normalizeMemoryConfigValue(
+                getLong(JobConf.MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY,
+                        DISABLED_MEMORY_LIMIT));
+    }
+    return value;
+  }
+  
+  // Returns the value set for the key MAPRED_TASK_MAXVMEM_PROPERTY,
+  // converted from bytes into MB.
+  // Returns DISABLED_MEMORY_LIMIT if the key is unset or set to a
+  // negative value.
+  private long getDeprecatedMemoryValue() {
+    long oldValue = getLong(MAPRED_TASK_MAXVMEM_PROPERTY, 
+        DISABLED_MEMORY_LIMIT);
+    oldValue = normalizeMemoryConfigValue(oldValue);
+    if (oldValue != DISABLED_MEMORY_LIMIT) {
+      oldValue /= (1024*1024);
     }
-    return getLong(
-      JobConf.MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY,
-      DISABLED_MEMORY_LIMIT);
+    return oldValue;
   }
 
   public void setMemoryForReduceTask(long mem) {
@@ -1738,18 +1773,21 @@
 
 
   /**
-   * The maximum amount of memory any task of this job will use. See
+   * Get the memory required to run a task of this job, in bytes. See
    * {@link #MAPRED_TASK_MAXVMEM_PROPERTY}
    * <p/>
-   * mapred.task.maxvmem is split into
-   * mapreduce.map.memory.mb
-   * and mapreduce.map.memory.mb,mapred
-   * each of the new key are set
-   * as mapred.task.maxvmem / 1024
-   * as new values are in MB
+   * This method is deprecated. Now, different memory limits can be
+   * set for map and reduce tasks of a job, in MB. 
+   * <p/>
+   * For backward compatibility, if the job configuration sets the
+   * key {@link #MAPRED_TASK_MAXVMEM_PROPERTY} to a value different
+   * from {@link #DISABLED_MEMORY_LIMIT}, that value is returned. 
+   * Otherwise, this method will return the larger of the values returned by 
+   * {@link #getMemoryForMapTask()} and {@link #getMemoryForReduceTask()}
+   * after converting them into bytes.
    *
-   * @return The maximum amount of memory any task of this job will use, in
-   *         bytes.
+   * @return Memory required to run a task of this job, in bytes,
+   *          or {@link #DISABLED_MEMORY_LIMIT}, if unset.
    * @see #setMaxVirtualMemoryForTask(long)
    * @deprecated Use {@link #getMemoryForMapTask()} and
    *             {@link #getMemoryForReduceTask()}
@@ -1760,24 +1798,16 @@
       "getMaxVirtualMemoryForTask() is deprecated. " +
       "Instead use getMemoryForMapTask() and getMemoryForReduceTask()");
 
-    if (get(JobConf.MAPRED_TASK_MAXVMEM_PROPERTY) == null) {
-      if (get(JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY) != null || get(
-        JobConf.MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY) != null) {
-        long val = Math.max(getMemoryForMapTask(), getMemoryForReduceTask());
-        if (val == JobConf.DISABLED_MEMORY_LIMIT) {
-          return val;
-        } else {
-          if (val < 0) {
-            return JobConf.DISABLED_MEMORY_LIMIT;
-          }
-          return val * 1024 * 1024;
-          //Convert MB to byte as new value is in
-          // MB and old deprecated method returns bytes
-        }
+    long value = getLong(MAPRED_TASK_MAXVMEM_PROPERTY, DISABLED_MEMORY_LIMIT);
+    value = normalizeMemoryConfigValue(value);
+    if (value == DISABLED_MEMORY_LIMIT) {
+      value = Math.max(getMemoryForMapTask(), getMemoryForReduceTask());
+      value = normalizeMemoryConfigValue(value);
+      if (value != DISABLED_MEMORY_LIMIT) {
+        value *= 1024*1024;
       }
     }
-
-    return getLong(JobConf.MAPRED_TASK_MAXVMEM_PROPERTY, DISABLED_MEMORY_LIMIT);
+    return value;
   }
 
   /**

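How the deprecated byte-valued key interacts with the new MB-valued
getters, sketched under the assumption that the constants and setters
are used exactly as declared in the hunks above:

    JobConf conf = new JobConf();
    // New-style per-task limits, in MB.
    conf.setMemoryForMapTask(512);
    conf.setMemoryForReduceTask(1024);
    long mapMb = conf.getMemoryForMapTask();    // 512

    // Deprecated single limit, in bytes. Once set to a non-negative
    // value, getDeprecatedMemoryValue() converts it to MB and it takes
    // precedence over both of the new keys.
    conf.setLong(JobConf.MAPRED_TASK_MAXVMEM_PROPERTY,
                 2048L * 1024 * 1024);
    mapMb = conf.getMemoryForMapTask();         // now 2048
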
Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobInProgress.java Tue Oct 27 15:43:58 2009
@@ -47,6 +47,7 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.JobStatus.State;
 import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
 import org.apache.hadoop.mapreduce.jobhistory.JobInfoChangeEvent;
@@ -116,7 +117,7 @@
   int failedMapTasks = 0; 
   int failedReduceTasks = 0;
   
-  private static float DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART = 0.05f;
+  static final float DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART = 0.05f;
   int completedMapsForReduceSlowstart = 0;
   
   // runningMapTasks include speculative tasks, so we need to capture 
@@ -170,7 +171,7 @@
    * {@link #findNewMapTask(TaskTrackerStatus, int, int, int, double)} should
    * schedule any available map tasks for this job, including speculative tasks.
    */
-  private int anyCacheLevel;
+  int anyCacheLevel;
   
   /**
    * A special value indicating that 
@@ -199,7 +200,7 @@
     new TreeMap<String, Integer>();
     
   //Confine estimation algorithms to an "oracle" class that JIP queries.
-  private ResourceEstimator resourceEstimator; 
+  ResourceEstimator resourceEstimator; 
   
   long startTime;
   long launchTime;
@@ -208,20 +209,20 @@
   // Indicates how many times the job got restarted
   private final int restartCount;
 
-  private JobConf conf;
+  JobConf conf;
   protected AtomicBoolean tasksInited = new AtomicBoolean(false);
   private JobInitKillStatus jobInitKillStatus = new JobInitKillStatus();
 
-  private LocalFileSystem localFs;
-  private FileSystem fs;
-  private JobID jobId;
+  LocalFileSystem localFs;
+  FileSystem fs;
+  JobID jobId;
   private boolean hasSpeculativeMaps;
   private boolean hasSpeculativeReduces;
-  private long inputLength = 0;
+  long inputLength = 0;
   
-  private Counters jobCounters = new Counters();
+  Counters jobCounters = new Counters();
   
-  private MetricsRecord jobMetrics;
+  MetricsRecord jobMetrics;
   
   // Maximum no. of fetch-failure notifications after which map task is killed
   private static final int MAX_FETCH_FAILURES_NOTIFICATIONS = 3;
@@ -238,9 +239,9 @@
   private Object schedulingInfo;
 
   //thresholds for speculative execution
-  private float slowTaskThreshold;
-  private float speculativeCap;
-  private float slowNodeThreshold; //standard deviations
+  float slowTaskThreshold;
+  float speculativeCap;
+  float slowNodeThreshold; //standard deviations
 
   //Statistics are maintained for a couple of things
   //mapTaskStats is used for maintaining statistics about
@@ -322,6 +323,7 @@
     this.status = new JobStatus(jobid, 0.0f, 0.0f, JobStatus.PREP, 
         this.profile.getUser(), this.profile.getJobName(), 
         this.profile.getJobFile(), "");
+    this.jobtracker.getInstrumentation().addPrepJob(conf, jobid);
     this.taskCompletionEvents = new ArrayList<TaskCompletionEvent>
     (numMapTasks + numReduceTasks + 10);
     
@@ -338,6 +340,11 @@
     }
   }
   
+  JobInProgress() {
+    restartCount = 0;
+    jobSetupCleanupNeeded = false;
+  }
+  
   /**
    * Create a JobInProgress with the given job file, plus a handle
    * to the tracker.
@@ -371,6 +378,7 @@
     this.status = new JobStatus(jobid, 0.0f, 0.0f, JobStatus.PREP, 
         profile.getUser(), profile.getJobName(), profile.getJobFile(), 
         profile.getURL().toString());
+    this.jobtracker.getInstrumentation().addPrepJob(conf, jobid);
     status.setStartTime(startTime);
     this.status.setJobPriority(this.priority);
 
@@ -739,10 +747,10 @@
     setup[1].setJobSetupTask();
   }
   
-  private void setupComplete() {
+  void setupComplete() {
     status.setSetupProgress(1.0f);
     if (this.status.getRunState() == JobStatus.PREP) {
-      this.status.setRunState(JobStatus.RUNNING);
+      changeStateTo(JobStatus.RUNNING);
       JobStatusChangedEvent jse = 
         new JobStatusChangedEvent(profile.getJobID(),
          JobStatus.getJobRunState(JobStatus.RUNNING));
@@ -1339,6 +1347,16 @@
       Task result = tip.getTaskToRun(tts.getTrackerName());
       if (result != null) {
         addRunningTaskToTIP(tip, result.getTaskID(), tts, true);
+        if (jobFailed) {
+          result.setJobCleanupTaskState(org.apache.hadoop.mapreduce.JobStatus
+                .State.FAILED);
+        } else if (jobKilled) {
+          result.setJobCleanupTaskState(org.apache.hadoop.mapreduce.JobStatus
+                .State.KILLED);
+        } else {
+          result.setJobCleanupTaskState(org.apache.hadoop.mapreduce
+                .JobStatus.State.SUCCEEDED);
+        }
       }
       return result;
     }
@@ -1523,6 +1541,7 @@
       name = TaskType.JOB_CLEANUP;
     } else if (tip.isMapTask()) {
       ++runningMapTasks;
+      metrics.addRunningMaps(jobId, 1);
       name = TaskType.MAP;
       counter = JobCounter.TOTAL_LAUNCHED_MAPS;
       splits = tip.getSplitNodes();
@@ -1534,6 +1553,7 @@
       metrics.launchMap(id);
     } else {
       ++runningReduceTasks;
+      metrics.addRunningReduces(jobId, 1);
       name = TaskType.REDUCE;
       counter = JobCounter.TOTAL_LAUNCHED_REDUCES;
       if (tip.isSpeculating()) {
@@ -1645,8 +1665,10 @@
     long now = System.currentTimeMillis();
     
     FallowSlotInfo info = map.get(taskTracker);
+    int reservedSlots = 0;
     if (info == null) {
       info = new FallowSlotInfo(now, numSlots);
+      reservedSlots = numSlots;
     } else {
       // Increment metering info if the reservation is changing
       if (info.getNumSlots() != numSlots) {
@@ -1658,11 +1680,19 @@
         jobCounters.incrCounter(counter, fallowSlotMillis);
         
         // Update 
+        reservedSlots = numSlots - info.getNumSlots();
         info.setTimestamp(now);
         info.setNumSlots(numSlots);
       }
     }
     map.put(taskTracker, info);
+    if (type == TaskType.MAP) {
+      jobtracker.getInstrumentation().addReservedMapSlots(reservedSlots);
+    }
+    else {
+      jobtracker.getInstrumentation().addReservedReduceSlots(reservedSlots);
+    }
+    jobtracker.incrementReservations(type, reservedSlots);
   }
   
   public synchronized void unreserveTaskTracker(TaskTracker taskTracker,
@@ -1688,6 +1718,14 @@
     jobCounters.incrCounter(counter, fallowSlotMillis);
 
     map.remove(taskTracker);
+    if (type == TaskType.MAP) {
+      jobtracker.getInstrumentation().decReservedMapSlots(info.getNumSlots());
+    }
+    else {
+      jobtracker.getInstrumentation().decReservedReduceSlots(
+        info.getNumSlots());
+    }
+    jobtracker.decrementReservations(type, info.getNumSlots());
   }
   
   public int getNumReservedTaskTrackersForMaps() {
@@ -2566,6 +2604,7 @@
       jobtracker.markCompletedTaskAttempt(status.getTaskTracker(), taskid);
     } else if (tip.isMapTask()) {
       runningMapTasks -= 1;
+      metrics.decRunningMaps(jobId, 1);
       finishedMapTasks += 1;
       metrics.completeMap(taskid);
       if (!tip.isJobSetupTask() && hasSpeculativeMaps) {
@@ -2578,6 +2617,7 @@
       }
     } else {
       runningReduceTasks -= 1;
+      metrics.decRunningReduces(jobId, 1);
       finishedReduceTasks += 1;
       metrics.completeReduce(taskid);
       if (!tip.isJobSetupTask() && hasSpeculativeReduces) {
@@ -2641,7 +2681,32 @@
   public float getSlowTaskThreshold() {
     return slowTaskThreshold;
   }
-  
+
+  /**
+   * Job state changes must happen through this call.
+   */
+  private void changeStateTo(int newState) {
+    int oldState = this.status.getRunState();
+    if (oldState == newState) {
+      return; // old and new states are the same
+    }
+    this.status.setRunState(newState);
+    
+    //update the metrics
+    if (oldState == JobStatus.PREP) {
+      this.jobtracker.getInstrumentation().decPrepJob(conf, jobId);
+    } else if (oldState == JobStatus.RUNNING) {
+      this.jobtracker.getInstrumentation().decRunningJob(conf, jobId);
+    }
+    
+    if (newState == JobStatus.PREP) {
+      this.jobtracker.getInstrumentation().addPrepJob(conf, jobId);
+    } else if (newState == JobStatus.RUNNING) {
+      this.jobtracker.getInstrumentation().addRunningJob(conf, jobId);
+    }
+    
+  }
+
   /**
   * The job is done since all its component tasks are either
    * successful or have failed.
@@ -2653,7 +2718,7 @@
     //
     if (this.status.getRunState() == JobStatus.RUNNING ||
         this.status.getRunState() == JobStatus.PREP) {
-      this.status.setRunState(JobStatus.SUCCEEDED);
+      changeStateTo(JobStatus.SUCCEEDED);
       this.status.setCleanupProgress(1.0f);
       if (maps.length == 0) {
         this.status.setMapProgress(1.0f);
@@ -2702,9 +2767,9 @@
       this.status.setFinishTime(this.finishTime);
 
       if (jobTerminationState == JobStatus.FAILED) {
-        this.status.setRunState(JobStatus.FAILED);
+        changeStateTo(JobStatus.FAILED);
       } else {
-        this.status.setRunState(JobStatus.KILLED);
+        changeStateTo(JobStatus.KILLED);
       }
       // Log the job summary
       JobSummary.logJobSummary(this, jobtracker.getClusterStatus(false));
@@ -2723,6 +2788,13 @@
 
       jobtracker.getInstrumentation().terminateJob(
           this.conf, this.status.getJobID());
+      if (jobTerminationState == JobStatus.FAILED) {
+        jobtracker.getInstrumentation().failedJob(
+            this.conf, this.status.getJobID());
+      } else {
+        jobtracker.getInstrumentation().killedJob(
+            this.conf, this.status.getJobID());
+      }
     }
   }
 
@@ -2894,6 +2966,7 @@
         launchedSetup = false;
       } else if (tip.isMapTask()) {
         runningMapTasks -= 1;
+        metrics.decRunningMaps(jobId, 1);
         metrics.failedMap(taskid);
         // remove from the running queue and put it in the non-running cache
         // if the tip is not complete i.e if the tip still needs to be run
@@ -2903,6 +2976,7 @@
         }
       } else {
         runningReduceTasks -= 1;
+        metrics.decRunningReduces(jobId, 1);
         metrics.failedReduce(taskid);
         // remove from the running queue and put in the failed queue if the tip
         // is not complete

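The gauge bookkeeping that changeStateTo() centralizes, traced as a
hypothetical call sequence (method and instrumentation names as in the
hunk above):

    changeStateTo(JobStatus.RUNNING);    // decPrepJob() + addRunningJob()
    changeStateTo(JobStatus.SUCCEEDED);  // decRunningJob() only; terminal
                                         // states carry no gauge
    changeStateTo(JobStatus.SUCCEEDED);  // old == new: early return, no-op
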
Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobPriority.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobPriority.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobPriority.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobPriority.java Tue Oct 27 15:43:58 2009
@@ -21,6 +21,7 @@
  * Used to describe the priority of the running job. 
  * @deprecated Use {@link org.apache.hadoop.mapreduce.JobPriority} instead
  */
+@Deprecated
 public enum JobPriority {
 
   VERY_HIGH,

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobStatus.java?rev=830230&r1=830229&r2=830230&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobStatus.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobStatus.java Tue Oct 27 15:43:58 2009
@@ -317,4 +317,9 @@
      return super.getReduceProgress(); 
    }
 
+   // A utility to convert a new-API job run state to the old int run state.
+   static int getOldNewJobRunState(
+     org.apache.hadoop.mapreduce.JobStatus.State state) {
+     return state.getValue();
+   }
 }
\ No newline at end of file
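A sketch of the new conversion helper in use; it is package-private, so
the caller is assumed to live in org.apache.hadoop.mapred:

    int oldRunState = JobStatus.getOldNewJobRunState(
        org.apache.hadoop.mapreduce.JobStatus.State.SUCCEEDED);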