Posted to mapreduce-commits@hadoop.apache.org by su...@apache.org on 2012/11/30 20:58:54 UTC

svn commit: r1415815 [3/3] - in /hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project: ./ conf/ dev-support/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/h...

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsView.java Fri Nov 30 19:58:09 2012
@@ -22,7 +22,6 @@ import static org.apache.hadoop.yarn.web
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.THEMESWITCHER_ID;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.postInitID;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
@@ -54,7 +53,6 @@ public class HsView extends TwoColumnLay
   protected void commonPreHead(Page.HTML<_> html) {
     set(ACCORDION_ID, "nav");
     set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
-    set(THEMESWITCHER_ID, "themeswitcher");
   }
 
   /*
@@ -84,17 +82,15 @@ public class HsView extends TwoColumnLay
    */
   private String jobsTableInit() {
     return tableInit().
+        append(", 'aaData': jobsTableData").
+        append(", bDeferRender: true").
+        append(", bProcessing: true").
+
         // Sort by id upon page load
         append(", aaSorting: [[2, 'desc']]").
         append(", aoColumnDefs:[").
-        // Maps Total
-        append("{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 7 ] }").
-        // Maps Completed
-        append(",{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 8 ] }").
-        // Reduces Total
-        append(",{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 9 ] }").
-        // Reduces Completed
-        append(",{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 10 ] }").
+        // Maps Total, Maps Completed, Reduces Total and Reduces Completed
+        append("{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 7, 8, 9, 10 ] }").
         append("]}").
         toString();
   }
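
The consolidated aoColumnDefs entry above covers the four numeric count columns (Maps Total, Maps Completed, Reduces Total, Reduces Completed) in one definition instead of four. A minimal standalone sketch of the string the revised jobsTableInit() builds (the opening base option stands in for tableInit(), whose actual contents differ):

    public class JobsTableInitSketch {
      public static void main(String[] args) {
        StringBuilder init = new StringBuilder("{'bJQueryUI': true"); // stand-in for tableInit()
        init.append(", 'aaData': jobsTableData")
            .append(", bDeferRender: true")
            .append(", bProcessing: true")
            .append(", aaSorting: [[2, 'desc']]")
            .append(", aoColumnDefs:[")
            // one numeric, non-searchable definition now covers all four count columns
            .append("{'sType':'numeric', 'bSearchable': false, 'aTargets': [ 7, 8, 9, 10 ] }")
            .append("]}");
        System.out.println(init);
      }
    }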

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java Fri Nov 30 19:58:09 2012
@@ -22,17 +22,20 @@ import javax.xml.bind.annotation.XmlAcce
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
 
+import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.util.VersionInfo;
 
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
 public class HistoryInfo {
 
+  protected long startedOn;
   protected String hadoopVersion;
   protected String hadoopBuildVersion;
   protected String hadoopVersionBuiltOn;
 
   public HistoryInfo() {
+    this.startedOn = JobHistoryServer.historyServerTimeStamp;
     this.hadoopVersion = VersionInfo.getVersion();
     this.hadoopBuildVersion = VersionInfo.getBuildVersion();
     this.hadoopVersionBuiltOn = VersionInfo.getDate();
@@ -50,4 +53,8 @@ public class HistoryInfo {
     return this.hadoopVersionBuiltOn;
   }
 
+  public long getStartedOn() {
+    return this.startedOn;
+  }
+
 }
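
Because HistoryInfo is a field-access JAXB DAO, the new startedOn field is picked up automatically in both the XML and JSON renderings of the history server's /info resource. A minimal sketch of that mapping (InfoSketch and its values are illustrative stand-ins, not the actual DAO):

    import javax.xml.bind.JAXB;
    import javax.xml.bind.annotation.XmlAccessType;
    import javax.xml.bind.annotation.XmlAccessorType;
    import javax.xml.bind.annotation.XmlRootElement;

    public class HistoryInfoMarshalSketch {
      @XmlRootElement(name = "historyInfo")
      @XmlAccessorType(XmlAccessType.FIELD)
      public static class InfoSketch {
        protected long startedOn = System.currentTimeMillis(); // mirrors the new field
        protected String hadoopVersion = "x.y.z";              // placeholder value
      }

      public static void main(String[] args) {
        // prints <historyInfo><startedOn>...</startedOn><hadoopVersion>x.y.z</hadoopVersion>...
        JAXB.marshal(new InfoSketch(), System.out);
      }
    }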

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java Fri Nov 30 19:58:09 2012
@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
+import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.Clock;
@@ -344,21 +345,24 @@ public class TestHsWebServices extends J
   }
 
   public void verifyHsInfoGeneric(String hadoopVersionBuiltOn,
-      String hadoopBuildVersion, String hadoopVersion) {
+      String hadoopBuildVersion, String hadoopVersion, long startedOn) {
     WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
         VersionInfo.getDate(), hadoopVersionBuiltOn);
     WebServicesTestUtils.checkStringMatch("hadoopBuildVersion",
         VersionInfo.getBuildVersion(), hadoopBuildVersion);
     WebServicesTestUtils.checkStringMatch("hadoopVersion",
         VersionInfo.getVersion(), hadoopVersion);
+    assertEquals("startedOn doesn't match: ",
+        JobHistoryServer.historyServerTimeStamp, startedon);
   }
 
   public void verifyHSInfo(JSONObject info, TestAppContext ctx)
       throws JSONException {
-    assertEquals("incorrect number of elements", 3, info.length());
+    assertEquals("incorrect number of elements", 4, info.length());
 
     verifyHsInfoGeneric(info.getString("hadoopVersionBuiltOn"),
-        info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"));
+        info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"),
+        info.getLong("startedOn"));
   }
 
   public void verifyHSInfoXML(String xml, TestAppContext ctx)
@@ -376,7 +380,8 @@ public class TestHsWebServices extends J
       verifyHsInfoGeneric(
           WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
           WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
-          WebServicesTestUtils.getXmlString(element, "hadoopVersion"));
+          WebServicesTestUtils.getXmlString(element, "hadoopVersion"),
+          WebServicesTestUtils.getXmlLong(element, "startedOn"));
     }
   }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java Fri Nov 30 19:58:09 2012
@@ -50,37 +50,40 @@ public class TestNLineInputFormat extend
     Job job = Job.getInstance(conf);
     Path file = new Path(workDir, "test.txt");
 
-    int seed = new Random().nextInt();
-    Random random = new Random(seed);
-
     localFs.delete(workDir, true);
     FileInputFormat.setInputPaths(job, workDir);
     int numLinesPerMap = 5;
     NLineInputFormat.setNumLinesPerSplit(job, numLinesPerMap);
-    // for a variety of lengths
     for (int length = 0; length < MAX_LENGTH;
-         length += random.nextInt(MAX_LENGTH / 10) + 1) {
+         length += 1) {
+ 
       // create a file with length entries
       Writer writer = new OutputStreamWriter(localFs.create(file));
       try {
         for (int i = 0; i < length; i++) {
-          writer.write(Integer.toString(i));
+          writer.write(Integer.toString(i)+" some more text");
           writer.write("\n");
         }
       } finally {
         writer.close();
       }
-      checkFormat(job, numLinesPerMap);
+      int lastN = 0;
+      if (length != 0) {
+        lastN = length % 5;
+        if (lastN == 0) {
+          lastN = 5;
+        }
+      }
+      checkFormat(job, numLinesPerMap, lastN);
     }
   }
 
-  void checkFormat(Job job, int expectedN) 
+  void checkFormat(Job job, int expectedN, int lastN) 
       throws IOException, InterruptedException {
     NLineInputFormat format = new NLineInputFormat();
     List<InputSplit> splits = format.getSplits(job);
-    // check all splits except last one
     int count = 0;
-    for (int i = 0; i < splits.size() -1; i++) {
+    for (int i = 0; i < splits.size(); i++) {
       assertEquals("There are no split locations", 0,
                    splits.get(i).getLocations().length);
       TaskAttemptContext context = MapReduceTestUtil.
@@ -104,8 +107,13 @@ public class TestNLineInputFormat extend
       } finally {
         reader.close();
       }
-      assertEquals("number of lines in split is " + expectedN ,
-                   expectedN, count);
+      if (i == splits.size() - 1) {
+        assertEquals("number of lines in split(" + i + ") is wrong",
+                     lastN, count);
+      } else {
+        assertEquals("number of lines in split(" + i + ") is wrong",
+                     expectedN, count);
+      }
     }
   }
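
The lastN bookkeeping above encodes a simple rule: with 5 lines per split, the final split holds length % 5 lines, except that an exact multiple fills it completely. The same arithmetic as a standalone sketch (a hypothetical helper, not part of the test):

    public class LastSplitSketch {
      // expected line count of the final split for a file of totalLines lines,
      // split every n lines (0 for an empty file)
      static int lastSplitLines(int totalLines, int n) {
        if (totalLines == 0) {
          return 0;
        }
        int rem = totalLines % n;
        return rem == 0 ? n : rem;
      }

      public static void main(String[] args) {
        System.out.println(lastSplitLines(12, 5)); // 2 (splits of 5, 5, 2)
        System.out.println(lastSplitLines(10, 5)); // 5 (the last split is full)
        System.out.println(lastSplitLines(0, 5));  // 0 (no splits)
      }
    }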
   

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestBinaryTokenFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestBinaryTokenFile.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestBinaryTokenFile.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestBinaryTokenFile.java Fri Nov 30 19:58:09 2012
@@ -35,26 +35,28 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.SleepJob;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
-@SuppressWarnings("deprecation")
-@Ignore
 public class TestBinaryTokenFile {
 
+  private static final String KEY_SECURITY_TOKEN_FILE_NAME = "key-security-token-file";
+  private static final String DELEGATION_TOKEN_KEY = "Hdfs";
+  
   // my sleep class
   static class MySleepMapper extends SleepJob.SleepMapper {
     /**
@@ -63,29 +65,65 @@ public class TestBinaryTokenFile {
     @Override
     public void map(IntWritable key, IntWritable value, Context context)
     throws IOException, InterruptedException {
-      // get token storage and a key
-      Credentials ts = context.getCredentials();
-      Collection<Token<? extends TokenIdentifier>> dts = ts.getAllTokens();
+      // get context token storage:
+      final Credentials contextCredentials = context.getCredentials();
       
+      final Collection<Token<? extends TokenIdentifier>> contextTokenCollection = contextCredentials.getAllTokens();
+      for (Token<? extends TokenIdentifier> t : contextTokenCollection) {
+        System.out.println("Context token: [" + t + "]");
+      }
+      if (contextTokenCollection.size() != 2) { // one job token and one delegation token
+        // fail the test:
+        throw new RuntimeException("Exactly 2 tokens are expected in the contextTokenCollection: " +
+        		"one job token and one delegation token, but was found " + contextTokenCollection.size() + " tokens.");
+      }
       
-      if(dts.size() != 2) { // one job token and one delegation token
-        throw new RuntimeException("tokens are not available"); // fail the test
+      final Token<? extends TokenIdentifier> dt = contextCredentials.getToken(new Text(DELEGATION_TOKEN_KEY));
+      if (dt == null) {
+        throw new RuntimeException("Token for key ["+DELEGATION_TOKEN_KEY+"] not found in the job context.");
       }
       
-      Token<? extends TokenIdentifier> dt = ts.getToken(new Text("Hdfs"));
+      String tokenFile0 = context.getConfiguration().get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
+      if (tokenFile0 != null) {
+        throw new RuntimeException("Token file key ["+MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY+"] found in the configuration. It should have been removed from the configuration.");
+      }
       
-      //Verify that dt is same as the token in the file
-      String tokenFile = context.getConfiguration().get(
-          "mapreduce.job.credentials.binary");
-      Credentials cred = new Credentials();
-      cred.readTokenStorageStream(new DataInputStream(new FileInputStream(
+      final String tokenFile = context.getConfiguration().get(KEY_SECURITY_TOKEN_FILE_NAME);
+      if (tokenFile == null) {
+        throw new RuntimeException("Token file key ["+KEY_SECURITY_TOKEN_FILE_NAME+"] not found in the job configuration.");
+      }
+      final Credentials binaryCredentials = new Credentials();
+      binaryCredentials.readTokenStorageStream(new DataInputStream(new FileInputStream(
           tokenFile)));
-      for (Token<? extends TokenIdentifier> t : cred.getAllTokens()) {
-        if (!dt.equals(t)) {
-          throw new RuntimeException(
-              "Delegation token in job is not same as the token passed in file."
-                  + " tokenInFile=" + t + ", dt=" + dt);
-        }
+      final Collection<Token<? extends TokenIdentifier>> binaryTokenCollection = binaryCredentials.getAllTokens();
+      if (binaryTokenCollection.size() != 1) {
+        throw new RuntimeException("The token collection read from file ["+tokenFile+"] must have size = 1.");
+      }
+      final Token<? extends TokenIdentifier> binTok = binaryTokenCollection.iterator().next(); 
+      System.out.println("The token read from binary file: t = [" + binTok + "]");
+      // Verify that dt is same as the token in the file:
+      if (!dt.equals(binTok)) {
+        throw new RuntimeException(
+              "Delegation token in job is not same as the token passed in file:"
+                  + " tokenInFile=[" + binTok + "], dt=[" + dt + "].");
+      }
+      
+      // Now test the user tokens.
+      final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+      // Print all the UGI tokens for diagnostic purposes:
+      final Collection<Token<? extends TokenIdentifier>> ugiTokenCollection = ugi.getTokens();
+      for (Token<? extends TokenIdentifier> t: ugiTokenCollection) {
+        System.out.println("UGI token: [" + t + "]");
+      }
+      final Token<? extends TokenIdentifier> ugiToken 
+        = ugi.getCredentials().getToken(new Text(DELEGATION_TOKEN_KEY));
+      if (ugiToken == null) {
+        throw new RuntimeException("Token for key ["+DELEGATION_TOKEN_KEY+"] not found among the UGI tokens.");
+      }
+      if (!ugiToken.equals(binTok)) {
+        throw new RuntimeException(
+              "UGI token is not same as the token passed in binary file:"
+                  + " tokenInBinFile=[" + binTok + "], ugiTok=[" + ugiToken + "].");
       }
       
       super.map(key, value, context);
@@ -118,13 +156,20 @@ public class TestBinaryTokenFile {
         TokenCache.obtainTokensForNamenodesInternal(cred1, new Path[] { p1 },
             job.getConfiguration());
         for (Token<? extends TokenIdentifier> t : cred1.getAllTokens()) {
-          cred2.addToken(new Text("Hdfs"), t);
+          cred2.addToken(new Text(DELEGATION_TOKEN_KEY), t);
         }
         DataOutputStream os = new DataOutputStream(new FileOutputStream(
             binaryTokenFileName.toString()));
-        cred2.writeTokenStorageToStream(os);
-        os.close();
-        job.getConfiguration().set("mapreduce.job.credentials.binary",
+        try {
+          cred2.writeTokenStorageToStream(os);
+        } finally {
+          os.close();
+        }
+        job.getConfiguration().set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY,
+            binaryTokenFileName.toString());
+        // NB: the MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY key now gets deleted from the config,
+        // so it is not accessible in the job's config; we use another key to pass the file name into the job configuration:
+        job.getConfiguration().set(KEY_SECURITY_TOKEN_FILE_NAME, 
             binaryTokenFileName.toString());
       } catch (IOException e) {
         Assert.fail("Exception " + e);
@@ -132,39 +177,53 @@ public class TestBinaryTokenFile {
     }
   }
   
-  private static MiniMRCluster mrCluster;
+  private static MiniMRYarnCluster mrCluster;
   private static MiniDFSCluster dfsCluster;
+  
   private static final Path TEST_DIR = 
     new Path(System.getProperty("test.build.data","/tmp"));
   private static final Path binaryTokenFileName = new Path(TEST_DIR, "tokenFile.binary");
-  private static int numSlaves = 1;
-  private static JobConf jConf;
+  
+  private static final int numSlaves = 1; // num of data nodes
+  private static final int noOfNMs = 1;
+  
   private static Path p1;
   
   @BeforeClass
   public static void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    dfsCluster = new MiniDFSCluster(conf, numSlaves, true, null);
-    jConf = new JobConf(conf);
-    mrCluster = new MiniMRCluster(0, 0, numSlaves, 
-        dfsCluster.getFileSystem().getUri().toString(), 1, null, null, null, 
-        jConf);
+    final Configuration conf = new Configuration();
+    
+    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
+    conf.set(YarnConfiguration.RM_PRINCIPAL, "jt_id/" + SecurityUtil.HOSTNAME_PATTERN + "@APACHE.ORG");
+    
+    final MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+    builder.checkExitOnShutdown(true);
+    builder.numDataNodes(numSlaves);
+    builder.format(true);
+    builder.racks(null);
+    dfsCluster = builder.build();
+    
+    mrCluster = new MiniMRYarnCluster(TestBinaryTokenFile.class.getName(), noOfNMs);
+    mrCluster.init(conf);
+    mrCluster.start();
 
-    NameNodeAdapter.getDtSecretManager(dfsCluster.getNamesystem()).startThreads();
-    FileSystem fs = dfsCluster.getFileSystem();
+    NameNodeAdapter.getDtSecretManager(dfsCluster.getNamesystem()).startThreads(); 
     
+    FileSystem fs = dfsCluster.getFileSystem(); 
     p1 = new Path("file1");
     p1 = fs.makeQualified(p1);
   }
 
   @AfterClass
   public static void tearDown() throws Exception {
-    if(mrCluster != null)
-      mrCluster.shutdown();
-    mrCluster = null;
-    if(dfsCluster != null)
+    if(mrCluster != null) {
+      mrCluster.stop();
+      mrCluster = null;
+    }
+    if(dfsCluster != null) {
       dfsCluster.shutdown();
-    dfsCluster = null;
+      dfsCluster = null;
+    }
   }
   
   /**
@@ -173,31 +232,24 @@ public class TestBinaryTokenFile {
    */
   @Test
   public void testBinaryTokenFile() throws IOException {
-    
-    System.out.println("running dist job");
-    
-    // make sure JT starts
-    jConf = mrCluster.createJobConf();
+    Configuration conf = mrCluster.getConfig();
     
     // provide namenodes names for the job to get the delegation tokens for
-    String nnUri = dfsCluster.getURI(0).toString();
-    jConf.set(MRJobConfig.JOB_NAMENODES, nnUri + "," + nnUri);
-    // job tracker principla id..
-    jConf.set(JTConfig.JT_USER_NAME, "jt_id");
+    final String nnUri = dfsCluster.getURI(0).toString();
+    conf.set(MRJobConfig.JOB_NAMENODES, nnUri + "," + nnUri);
     
     // using argument to pass the file name
-    String[] args = { 
+    final String[] args = { 
         "-m", "1", "-r", "1", "-mt", "1", "-rt", "1"
         };
-     
     int res = -1;
     try {
-      res = ToolRunner.run(jConf, new MySleepJob(), args);
+      res = ToolRunner.run(conf, new MySleepJob(), args);
     } catch (Exception e) {
-      System.out.println("Job failed with" + e.getLocalizedMessage());
+      System.out.println("Job failed with " + e.getLocalizedMessage());
       e.printStackTrace(System.out);
       fail("Job failed");
     }
-    assertEquals("dist job res is not 0", res, 0);
+    assertEquals("dist job res is not 0:", 0, res);
   }
 }
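
The mechanism the reworked test exercises is the binary credentials file: the submitter serializes a Credentials object holding the delegation token, and the map task deserializes it and compares tokens. The round trip in miniature (the Credentials/Token calls are the same ones used in the diff; the file name and token source are illustrative, so this is a sketch rather than working security code):

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    public class TokenFileSketch {
      static void write(Credentials creds, String file) throws IOException {
        DataOutputStream os = new DataOutputStream(new FileOutputStream(file));
        try {
          creds.writeTokenStorageToStream(os); // same call as the submitter side above
        } finally {
          os.close();
        }
      }

      static Token<? extends TokenIdentifier> read(String file) throws IOException {
        Credentials creds = new Credentials();
        creds.readTokenStorageStream(new DataInputStream(new FileInputStream(file)));
        return creds.getToken(new Text("Hdfs")); // same key the test uses
      }
    }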

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java Fri Nov 30 19:58:09 2012
@@ -31,6 +31,7 @@ import org.apache.hadoop.mapred.RunningJ
 
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -52,6 +53,8 @@ public class TestEncryptedShuffle {
   private static final String BASEDIR =
     System.getProperty("test.build.dir", "target/test-dir") + "/" +
     TestEncryptedShuffle.class.getSimpleName();
+  
+  private String classpathDir;
 
   @BeforeClass
   public static void setUp() throws Exception {
@@ -62,27 +65,12 @@ public class TestEncryptedShuffle {
 
   @Before
   public void createCustomYarnClasspath() throws Exception {
-    String classpathDir =
-      KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
-
-    URL url = Thread.currentThread().getContextClassLoader().
-      getResource("mrapp-generated-classpath");
-    File f = new File(url.getPath());
-    BufferedReader reader = new BufferedReader(new FileReader(f));
-    String cp = reader.readLine();
-    cp = cp + ":" + classpathDir;
-    f = new File(classpathDir, "mrapp-generated-classpath");
-    Writer writer = new FileWriter(f);
-    writer.write(cp);
-    writer.close();
+    classpathDir = KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
     new File(classpathDir, "core-site.xml").delete();
   }
 
   @After
   public void cleanUpMiniClusterSpecialConfig() throws Exception {
-    String classpathDir =
-      KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
-    new File(classpathDir, "mrapp-generated-classpath").delete();
     new File(classpathDir, "core-site.xml").delete();
     String keystoresDir = new File(BASEDIR).getAbsolutePath();
     KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, classpathDir);
@@ -98,6 +86,9 @@ public class TestEncryptedShuffle {
     conf.set("dfs.block.access.token.enable", "false");
     conf.set("dfs.permissions", "true");
     conf.set("hadoop.security.authentication", "simple");
+    String cp = conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH) +
+      File.pathSeparator + classpathDir;
+    conf.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH, cp);
     dfsCluster = new MiniDFSCluster(conf, 1, true, null);
     FileSystem fileSystem = dfsCluster.getFileSystem();
     fileSystem.mkdirs(new Path("/tmp"));
@@ -113,8 +104,6 @@ public class TestEncryptedShuffle {
     mrCluster = MiniMRClientClusterFactory.create(this.getClass(), 1, conf);
 
     // so the minicluster conf is avail to the containers.
-    String classpathDir =
-      KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
     Writer writer = new FileWriter(classpathDir + "/core-site.xml");
     mrCluster.getConfig().writeXml(writer);
     writer.close();

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java Fri Nov 30 19:58:09 2012
@@ -37,6 +37,7 @@ import java.io.RandomAccessFile;
 import java.net.InetSocketAddress;
 import java.net.URL;
 import java.nio.ByteBuffer;
+import java.nio.channels.ClosedChannelException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -45,6 +46,7 @@ import java.util.concurrent.ConcurrentHa
 import java.util.concurrent.Executors;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.TimeUnit;
+import java.util.regex.Pattern;
 
 import javax.crypto.SecretKey;
 
@@ -106,6 +108,7 @@ import org.jboss.netty.handler.ssl.SslHa
 import org.jboss.netty.handler.stream.ChunkedWriteHandler;
 import org.jboss.netty.util.CharsetUtil;
 
+import com.google.common.base.Charsets;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 public class ShuffleHandler extends AbstractService 
@@ -119,10 +122,16 @@ public class ShuffleHandler extends Abst
   public static final String SHUFFLE_READAHEAD_BYTES = "mapreduce.shuffle.readahead.bytes";
   public static final int DEFAULT_SHUFFLE_READAHEAD_BYTES = 4 * 1024 * 1024;
 
+  // pattern to identify errors related to the client closing the socket early
+  // idea borrowed from Netty SslHandler
+  private static final Pattern IGNORABLE_ERROR_MESSAGE = Pattern.compile(
+      "^.*(?:connection.*reset|connection.*closed|broken.*pipe).*$",
+      Pattern.CASE_INSENSITIVE);
+
   private int port;
   private ChannelFactory selector;
   private final ChannelGroup accepted = new DefaultChannelGroup();
-  private HttpPipelineFactory pipelineFact;
+  protected HttpPipelineFactory pipelineFact;
   private int sslFileBufferSize;
 
   /**
@@ -318,13 +327,17 @@ public class ShuffleHandler extends Abst
     }
   }
 
+  protected Shuffle getShuffle(Configuration conf) {
+    return new Shuffle(conf);
+  }
+
   class HttpPipelineFactory implements ChannelPipelineFactory {
 
     final Shuffle SHUFFLE;
     private SSLFactory sslFactory;
 
     public HttpPipelineFactory(Configuration conf) throws Exception {
-      SHUFFLE = new Shuffle(conf);
+      SHUFFLE = getShuffle(conf);
       if (conf.getBoolean(MRConfig.SHUFFLE_SSL_ENABLED_KEY,
                           MRConfig.SHUFFLE_SSL_ENABLED_DEFAULT)) {
         sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
@@ -464,7 +477,7 @@ public class ShuffleHandler extends Abst
       lastMap.addListener(ChannelFutureListener.CLOSE);
     }
 
-    private void verifyRequest(String appid, ChannelHandlerContext ctx,
+    protected void verifyRequest(String appid, ChannelHandlerContext ctx,
         HttpRequest request, HttpResponse response, URL requestUri)
         throws IOException {
       SecretKey tokenSecret = secretManager.retrieveTokenSecret(appid);
@@ -490,7 +503,8 @@ public class ShuffleHandler extends Abst
       SecureShuffleUtils.verifyReply(urlHashStr, enc_str, tokenSecret);
       // verification passed - encode the reply
       String reply =
-        SecureShuffleUtils.generateHash(urlHashStr.getBytes(), tokenSecret);
+        SecureShuffleUtils.generateHash(urlHashStr.getBytes(Charsets.UTF_8), 
+            tokenSecret);
       response.setHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
       if (LOG.isDebugEnabled()) {
         int len = reply.length();
@@ -564,12 +578,12 @@ public class ShuffleHandler extends Abst
       return writeFuture;
     }
 
-    private void sendError(ChannelHandlerContext ctx,
+    protected void sendError(ChannelHandlerContext ctx,
         HttpResponseStatus status) {
       sendError(ctx, "", status);
     }
 
-    private void sendError(ChannelHandlerContext ctx, String message,
+    protected void sendError(ChannelHandlerContext ctx, String message,
         HttpResponseStatus status) {
       HttpResponse response = new DefaultHttpResponse(HTTP_1_1, status);
       response.setHeader(CONTENT_TYPE, "text/plain; charset=UTF-8");
@@ -588,6 +602,16 @@ public class ShuffleHandler extends Abst
       if (cause instanceof TooLongFrameException) {
         sendError(ctx, BAD_REQUEST);
         return;
+      } else if (cause instanceof IOException) {
+        if (cause instanceof ClosedChannelException) {
+          LOG.debug("Ignoring closed channel error", cause);
+          return;
+        }
+        String message = String.valueOf(cause.getMessage());
+        if (IGNORABLE_ERROR_MESSAGE.matcher(message).matches()) {
+          LOG.debug("Ignoring client socket close", cause);
+          return;
+        }
       }
 
       LOG.error("Shuffle error: ", cause);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java Fri Nov 30 19:58:09 2012
@@ -17,17 +17,35 @@
  */
 package org.apache.hadoop.mapred;
 
+import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
+import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.apache.hadoop.test.MockitoMaker.make;
+import static org.apache.hadoop.test.MockitoMaker.stub;
+import static org.jboss.netty.buffer.ChannelBuffers.wrappedBuffer;
+import static org.junit.Assert.assertEquals;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.MetricsSource;
-import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
 import org.apache.hadoop.metrics2.MetricsSystem;
-import static org.apache.hadoop.test.MetricsAsserts.*;
-
+import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
+import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelFuture;
-
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.handler.codec.http.HttpRequest;
+import org.jboss.netty.handler.codec.http.HttpResponse;
+import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+import org.junit.Assert;
 import org.junit.Test;
-import static org.junit.Assert.*;
-import static org.apache.hadoop.test.MockitoMaker.*;
 
 public class TestShuffleHandler {
   static final long MiB = 1024 * 1024;
@@ -69,4 +87,76 @@ public class TestShuffleHandler {
     assertCounter("ShuffleOutputsOK", succeeded, rb);
     assertGauge("ShuffleConnections", connections, rb);
   }
+
+  @Test
+  public void testClientClosesConnection() throws Exception {
+    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
+    Configuration conf = new Configuration();
+    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
+    ShuffleHandler shuffleHandler = new ShuffleHandler() {
+      @Override
+      protected Shuffle getShuffle(Configuration conf) {
+        // replace the shuffle handler with one stubbed for testing
+        return new Shuffle(conf) {
+          @Override
+          protected void verifyRequest(String appid, ChannelHandlerContext ctx,
+              HttpRequest request, HttpResponse response, URL requestUri)
+                  throws IOException {
+          }
+          @Override
+          protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx,
+              Channel ch, String user, String jobId, String mapId, int reduce)
+                  throws IOException {
+            // send a shuffle header and a lot of data down the channel
+            // to trigger a broken pipe
+            ShuffleHeader header =
+                new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
+            DataOutputBuffer dob = new DataOutputBuffer();
+            header.write(dob);
+            ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
+            dob = new DataOutputBuffer();
+            for (int i=0; i<100000; ++i) {
+              header.write(dob);
+            }
+            return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
+          }
+          @Override
+          protected void sendError(ChannelHandlerContext ctx,
+              HttpResponseStatus status) {
+            if (failures.size() == 0) {
+              failures.add(new Error());
+              ctx.getChannel().close();
+            }
+          }
+          @Override
+          protected void sendError(ChannelHandlerContext ctx, String message,
+              HttpResponseStatus status) {
+            if (failures.size() == 0) {
+              failures.add(new Error());
+              ctx.getChannel().close();
+            }
+          }
+        };
+      }
+    };
+    shuffleHandler.init(conf);
+    shuffleHandler.start();
+
+    // simulate a reducer that closes early by reading a single shuffle header
+    // then closing the connection
+    URL url = new URL("http://127.0.0.1:"
+      + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
+      + "/mapOutput?job=job_12345_1&reduce=1&map=attempt_12345_1_m_1_0");
+    HttpURLConnection conn = (HttpURLConnection)url.openConnection();
+    conn.connect();
+    DataInputStream input = new DataInputStream(conn.getInputStream());
+    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    ShuffleHeader header = new ShuffleHeader();
+    header.readFields(input);
+    input.close();
+
+    shuffleHandler.stop();
+    Assert.assertTrue("sendError called when client closed connection",
+        failures.size() == 0);
+  }
 }
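
The test above works because the ShuffleHandler hunks widened getShuffle(), verifyRequest() and sendError() from private to protected, turning them into an override seam. The pattern in miniature (Handler and Worker are hypothetical stand-ins, not Hadoop classes):

    public class OverrideSeamSketch {
      static class Worker {
        void run() { System.out.println("real"); }
      }

      static class Handler {
        protected Worker getWorker() { return new Worker(); } // protected: subclasses may substitute
      }

      public static void main(String[] args) {
        Handler testHandler = new Handler() {
          @Override
          protected Worker getWorker() {
            return new Worker() {
              @Override
              void run() { System.out.println("stub"); } // instrumented for the test
            };
          }
        };
        testHandler.getWorker().run(); // prints "stub"
      }
    }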

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml Fri Nov 30 19:58:09 2012
@@ -60,4 +60,10 @@
     <Bug pattern="ICAST_QUESTIONABLE_UNSIGNED_RIGHT_SHIFT" />
   </Match>
 
+  <Match>
+    <Class name="org.apache.hadoop.examples.terasort.TeraInputFormat" />
+      <Method name="getSplits" />
+    <Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
+  </Match>
+
 </FindBugsFilter>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml Fri Nov 30 19:58:09 2012
@@ -103,6 +103,11 @@
        <artifactId>hsqldb</artifactId>
        <scope>provided</scope>
      </dependency>
+     <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <scope>provided</scope>
+     </dependency>
   </dependencies>
   
   <build>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java Fri Nov 30 19:58:09 2012
@@ -22,7 +22,9 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.io.PrintStream;
+import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
@@ -50,6 +52,8 @@ import org.apache.hadoop.mapreduce.lib.o
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
+
 /**
  * A map/reduce program that uses Bailey-Borwein-Plouffe to compute exact 
  * digits of Pi.
@@ -151,7 +155,8 @@ public class BaileyBorweinPlouffe extend
         LOG.info("Writing text output to " + outfile);
         final OutputStream outputstream = fs.create(outfile);
         try {
-          final PrintStream out = new PrintStream(outputstream, true);
+          final PrintWriter out = new PrintWriter(
+              new OutputStreamWriter(outputstream, Charsets.UTF_8), true);
           // write hex text
           print(out, hex.iterator(), "Pi = 0x3.", "%02X", 5, 5);
           out.println("Total number of hexadecimal digits is "
@@ -184,7 +189,7 @@ public class BaileyBorweinPlouffe extend
   }
 
   /** Print out elements in a nice format. */
-  private static <T> void print(PrintStream out, Iterator<T> iterator,
+  private static <T> void print(PrintWriter out, Iterator<T> iterator,
       String prefix, String format, int elementsPerGroup, int groupsPerLine) {
     final StringBuilder sb = new StringBuilder("\n");
     for (int i = 0; i < prefix.length(); i++)

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java Fri Nov 30 19:58:09 2012
@@ -37,6 +37,8 @@ import org.apache.hadoop.mapreduce.lib.o
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
+
 public class WordMean extends Configured implements Tool {
 
   private double mean = 0;
@@ -125,7 +127,7 @@ public class WordMean extends Configured
 
     // average = total sum / number of elements;
     try {
-      br = new BufferedReader(new InputStreamReader(fs.open(file)));
+      br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
 
       long count = 0;
       long length = 0;
@@ -151,7 +153,9 @@ public class WordMean extends Configured
       System.out.println("The mean is: " + theMean);
       return theMean;
     } finally {
-      br.close();
+      if (br != null) {
+        br.close();
+      }
     }
   }
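
The WordMean, WordMedian and WordStandardDeviation hunks apply the same two fixes: decode with an explicit UTF-8 charset rather than the platform default, and guard close() against a reader that was never assigned because open() threw. The pattern on a plain local file (file path taken from args; Guava's Charsets constant as in the diffs):

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;

    import com.google.common.base.Charsets;

    public class UtfReaderSketch {
      public static void main(String[] args) throws IOException {
        BufferedReader br = null;
        try {
          br = new BufferedReader(new InputStreamReader(
              new FileInputStream(args[0]), Charsets.UTF_8)); // explicit charset
          String line;
          while ((line = br.readLine()) != null) {
            System.out.println(line);
          }
        } finally {
          if (br != null) { // the stream constructor may have thrown before br was assigned
            br.close();
          }
        }
      }
    }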
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java Fri Nov 30 19:58:09 2012
@@ -38,6 +38,8 @@ import org.apache.hadoop.mapreduce.lib.o
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
+
 public class WordMedian extends Configured implements Tool {
 
   private double median = 0;
@@ -127,7 +129,7 @@ public class WordMedian extends Configur
     BufferedReader br = null;
 
     try {
-      br = new BufferedReader(new InputStreamReader(fs.open(file)));
+      br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
       int num = 0;
 
       String line;
@@ -157,7 +159,9 @@ public class WordMedian extends Configur
         }
       }
     } finally {
-      br.close();
+      if (br != null) {
+        br.close();
+      }
     }
     // error, no median found
     return -1;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java Fri Nov 30 19:58:09 2012
@@ -37,6 +37,8 @@ import org.apache.hadoop.mapreduce.lib.o
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
+
 public class WordStandardDeviation extends Configured implements Tool {
 
   private double stddev = 0;
@@ -135,7 +137,7 @@ public class WordStandardDeviation exten
     double stddev = 0;
     BufferedReader br = null;
     try {
-      br = new BufferedReader(new InputStreamReader(fs.open(file)));
+      br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
       long count = 0;
       long length = 0;
       long square = 0;
@@ -166,7 +168,9 @@ public class WordStandardDeviation exten
       stddev = Math.sqrt((term - mean));
       System.out.println("The standard deviation is: " + stddev);
     } finally {
-      br.close();
+      if (br != null) {
+        br.close();
+      }
     }
     return stddev;
   }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java Fri Nov 30 19:58:09 2012
@@ -33,6 +33,8 @@ import org.apache.hadoop.mapreduce.lib.i
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.*;
 
+import com.google.common.base.Charsets;
+
 /**
  * Launch a distributed pentomino solver.
  * It generates a complete list of prefixes of length N with each unique prefix
@@ -137,9 +139,9 @@ public class DistributedPentomino extend
     fs.mkdirs(dir);
     List<int[]> splits = pent.getSplits(depth);
     Path input = new Path(dir, "part1");
-    PrintStream file = 
-      new PrintStream(new BufferedOutputStream
-                      (fs.create(input), 64*1024));
+    PrintWriter file = 
+      new PrintWriter(new OutputStreamWriter(new BufferedOutputStream
+                      (fs.create(input), 64*1024), Charsets.UTF_8));
     for(int[] prefix: splits) {
       for(int i=0; i < prefix.length; ++i) {
         if (i != 0) {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java Fri Nov 30 19:58:09 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.examples.danci
 import java.io.*;
 import java.util.*;
 
+import com.google.common.base.Charsets;
+
 /**
  * This class uses the dancing links algorithm from Knuth to solve sudoku
  * puzzles. It has solved 42x42 puzzles in 1.02 seconds.
@@ -133,7 +135,8 @@ public class Sudoku {
    * @param stream The input stream to read the data from
    */
   public Sudoku(InputStream stream) throws IOException {
-    BufferedReader file = new BufferedReader(new InputStreamReader(stream));
+    BufferedReader file = new BufferedReader(
+        new InputStreamReader(stream, Charsets.UTF_8));
     String line = file.readLine();
     List<int[]> result = new ArrayList<int[]>();
     while (line != null) {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java Fri Nov 30 19:58:09 2012
@@ -19,9 +19,11 @@ package org.apache.hadoop.examples.pi;
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -32,6 +34,8 @@ import java.util.TreeMap;
 import org.apache.hadoop.examples.pi.math.Bellard;
 import org.apache.hadoop.examples.pi.math.Bellard.Parameter;
 
+import com.google.common.base.Charsets;
+
 /** A class for parsing outputs */
 public final class Parser {
   static final String VERBOSE_PROPERTY = "pi.parser.verbose";
@@ -71,7 +75,8 @@ public final class Parser {
       for(Parameter p : Parameter.values())
         m.put(p, new ArrayList<TaskResult>());
 
-      final BufferedReader in = new BufferedReader(new FileReader(f)); 
+      final BufferedReader in = new BufferedReader(
+          new InputStreamReader(new FileInputStream(f), Charsets.UTF_8)); 
       try {
         for(String line; (line = in.readLine()) != null; )
           try {
@@ -127,7 +132,8 @@ public final class Parser {
         Collections.sort(results);
 
         final PrintWriter out = new PrintWriter(
-            new FileWriter(new File(outputdir, p + ".txt")), true);
+            new OutputStreamWriter(new FileOutputStream(
+                new File(outputdir, p + ".txt")), Charsets.UTF_8), true);
         try {
           for(int i = 0; i < results.size(); i++)
             out.println(DistSum.taskResult2string(p + "." + i, results.get(i)));

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java Fri Nov 30 19:58:09 2012
@@ -19,9 +19,10 @@ package org.apache.hadoop.examples.pi;
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.text.SimpleDateFormat;
@@ -46,6 +47,8 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
+
 /** Utility methods */
 public class Util {
   /** Output stream */
@@ -81,7 +84,7 @@ public class Util {
       final long t = System.currentTimeMillis();
       final long delta = t - (isAccumulative? start: previous);
       if (s != null) {
-        out.format("%15dms (=%-15s: %s\n", delta, millis2String(delta) + ")", s);
+        out.format("%15dms (=%-15s: %s%n", delta, millis2String(delta) + ")", s);
         out.flush();
       }
       previous = t;
@@ -203,16 +206,16 @@ public class Util {
       throw new IllegalArgumentException("dir (=" + dir + ") is not a directory.");
   }
 
-  private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
   /** Create a writer of a local file. */
   public static PrintWriter createWriter(File dir, String prefix) throws IOException {
     checkDirectory(dir);
-
+    
+    SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
     for(;;) {
       final File f = new File(dir,
-          prefix + DATE_FORMAT.format(new Date(System.currentTimeMillis())) + ".txt");
+          prefix + dateFormat.format(new Date(System.currentTimeMillis())) + ".txt");
       if (!f.exists())
-        return new PrintWriter(new FileWriter(f));
+        return new PrintWriter(new OutputStreamWriter(new FileOutputStream(f), Charsets.UTF_8));
 
       try {Thread.sleep(10);} catch (InterruptedException e) {}
     }
@@ -286,7 +289,8 @@ public class Util {
     final List<TaskResult> results = new ArrayList<TaskResult>();
     for(FileStatus status : fs.listStatus(outdir)) {
       if (status.getPath().getName().startsWith("part-")) {
-        final BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(status.getPath())));
+        final BufferedReader in = new BufferedReader(
+            new InputStreamReader(fs.open(status.getPath()), Charsets.UTF_8));
         try {
           for(String line; (line = in.readLine()) != null; )
             results.add(TaskResult.valueOf(line));
@@ -305,7 +309,7 @@ public class Util {
   static void writeResults(String name, List<TaskResult> results, FileSystem fs, String dir) throws IOException {
     final Path outfile = new Path(dir, name + ".txt");
     Util.out.println(name + "> writing results to " + outfile);
-    final PrintStream out = new PrintStream(fs.create(outfile), true);
+    final PrintWriter out = new PrintWriter(new OutputStreamWriter(fs.create(outfile), Charsets.UTF_8), true);
     try {
       for(TaskResult r : results)
         out.println(r);
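
One subtle fix in the Util.java hunk: the shared static DATE_FORMAT becomes a local variable, because SimpleDateFormat keeps mutable state and is not thread-safe when shared. The safe per-call pattern (stamp() is a hypothetical helper mirroring createWriter's naming logic):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class TimestampSketch {
      static String stamp(String prefix) {
        // a fresh instance per call: no shared mutable state between threads
        SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
        return prefix + dateFormat.format(new Date()) + ".txt";
      }

      public static void main(String[] args) {
        System.out.println(stamp("results"));
      }
    }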

Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java?rev=1415815&r1=1415814&r2=1415815&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java Fri Nov 30 19:58:09 2012
@@ -29,6 +29,8 @@ import org.apache.hadoop.mapreduce.Input
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
+import com.google.common.base.Charsets;
+
 class TeraScheduler {
   static String USE = "mapreduce.terasort.use.terascheduler";
   private static final Log LOG = LogFactory.getLog(TeraScheduler.class);
@@ -73,7 +75,8 @@ class TeraScheduler {
 
   List<String> readFile(String filename) throws IOException {
     List<String> result = new ArrayList<String>(10000);
-    BufferedReader in = new BufferedReader(new FileReader(filename));
+    BufferedReader in = new BufferedReader(
+        new InputStreamReader(new FileInputStream(filename), Charsets.UTF_8));
     String line = in.readLine();
     while (line != null) {
       result.add(line);