Posted to yarn-commits@hadoop.apache.org by cn...@apache.org on 2013/12/16 21:09:01 UTC

svn commit: r1551332 - in /hadoop/common/branches/HDFS-4685/hadoop-yarn-project: ./ hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/ hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/...

Author: cnauroth
Date: Mon Dec 16 20:08:58 2013
New Revision: 1551332

URL: http://svn.apache.org/r1551332
Log:
Merge trunk to HDFS-4685.

Modified:
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/CHANGES.txt
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServer.java
    hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/CHANGES.txt?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/CHANGES.txt Mon Dec 16 20:08:58 2013
@@ -168,6 +168,13 @@ Release 2.4.0 - UNRELEASED
     YARN-1311. Fixed app specific scheduler-events' names to be app-attempt
     based. (vinodkv via jianhe)
 
+    YARN-1485. Modified RM HA configuration validation to also ensure that
+    service-address configuration are configured for every RM. (Xuan Gong via
+    vinodkv)
+
+    YARN-1435. Modified Distributed Shell to accept either the command or the
+    custom script. (Xuan Gong via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -239,6 +246,12 @@ Release 2.4.0 - UNRELEASED
     YARN-1405. Fixed ResourceManager to not hang when init/start fails with an
     exception w.r.t state-store. (Jian He via vinodkv)
 
+    YARN-1505. Fixed Webapplication proxy server to not hardcode its bind
+    address. (Xuan Gong via vinodkv)
+
+    YARN-1145. Fixed a potential file-handle leak in the web interface for
+    displaying aggregated logs. (Rohith Sharma via vinodkv)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java Mon Dec 16 20:08:58 2013
@@ -58,13 +58,17 @@ public class HAUtil {
    */
   public static void verifyAndSetConfiguration(Configuration conf)
     throws YarnRuntimeException {
-    verifyAndSetRMHAIds(conf);
-    verifyAndSetRMHAId(conf);
+    verifyAndSetRMHAIdsList(conf);
+    verifyAndSetCurrentRMHAId(conf);
     verifyAndSetAllServiceAddresses(conf);
   }
 
-
-  private static void verifyAndSetRMHAIds(Configuration conf) {
+  /**
+   * Verify configuration that there are at least two RM-ids
+   * and RPC addresses are specified for each RM-id.
+   * Then set the RM-ids.
+   */
+  private static void verifyAndSetRMHAIdsList(Configuration conf) {
     Collection<String> ids =
       conf.getTrimmedStringCollection(YarnConfiguration.RM_HA_IDS);
     if (ids.size() < 2) {
@@ -76,6 +80,24 @@ public class HAUtil {
 
     StringBuilder setValue = new StringBuilder();
     for (String id: ids) {
+      // verify the RM service addresses configurations for every RMIds
+      for (String prefix : YarnConfiguration.RM_SERVICES_ADDRESS_CONF_KEYS) {
+        String confKey = null;
+        try {
+          confKey = addSuffix(prefix, id);
+          if (conf.getTrimmed(confKey) == null) {
+            throwBadConfigurationException(getNeedToSetValueMessage(confKey));
+          }
+        } catch (IllegalArgumentException iae) {
+          String errmsg = iae.getMessage();
+          if (confKey == null) {
+            // Error at addSuffix
+            errmsg = getInvalidValueMessage(YarnConfiguration.RM_HA_ID,
+              getRMHAId(conf));
+          }
+          throwBadConfigurationException(errmsg);
+        }
+      }
       setValue.append(id);
       setValue.append(",");
     }
@@ -83,7 +105,7 @@ public class HAUtil {
       setValue.substring(0, setValue.length() - 1));
   }
 
-  private static void verifyAndSetRMHAId(Configuration conf) {
+  private static void verifyAndSetCurrentRMHAId(Configuration conf) {
     String rmId = conf.getTrimmed(YarnConfiguration.RM_HA_ID);
     if (rmId == null) {
       throwBadConfigurationException(

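For reference, the stricter check introduced above (YARN-1485) expects a service address for every RM-id, not just the local one. A minimal sketch of a configuration that passes verifyAndSetConfiguration, using the same keys and the HAUtil.addSuffix helper exercised by TestHAUtil and MiniYARNCluster later in this commit (the RM-ids and host:port values are placeholders and exception handling is omitted):

    Configuration conf = new YarnConfiguration();
    conf.set(YarnConfiguration.RM_HA_IDS, "rm1,rm2");   // at least two RM-ids are required
    conf.set(YarnConfiguration.RM_HA_ID, "rm1");        // the id of this ResourceManager
    for (String confKey : YarnConfiguration.RM_SERVICES_ADDRESS_CONF_KEYS) {
      // every service-address key must now be set for every RM-id
      conf.set(HAUtil.addSuffix(confKey, "rm1"), "1.2.3.4:8021");
      conf.set(HAUtil.addSuffix(confKey, "rm2"), "localhost:8022");
    }
    HAUtil.verifyAndSetConfiguration(conf);  // throws YarnRuntimeException if any address is missing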
Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java Mon Dec 16 20:08:58 2013
@@ -218,13 +218,14 @@ public class ApplicationMaster {
   private long shellScriptPathLen = 0;
 
   // Hardcoded path to shell script in launch container's local env
-  private final String ExecShellStringPath = "ExecShellScript.sh";
+  private static final String ExecShellStringPath = "ExecShellScript.sh";
+  private static final String ExecBatScripStringtPath = "ExecBatScript.bat";
 
   // Hardcoded path to custom log_properties
-  private final String log4jPath = "log4j.properties";
+  private static final String log4jPath = "log4j.properties";
 
-  private final String shellCommandPath = "shellCommands";
-  private final String shellArgsPath = "shellArgs";
+  private static final String shellCommandPath = "shellCommands";
+  private static final String shellArgsPath = "shellArgs";
 
   private volatile boolean done;
   private volatile boolean success;
@@ -234,6 +235,9 @@ public class ApplicationMaster {
   // Launch threads
   private List<Thread> launchThreads = new ArrayList<Thread>();
 
+  private final String linux_bash_command = "bash";
+  private final String windows_command = "cmd /c";
+
   /**
    * @param args Command line args
    */
@@ -308,8 +312,6 @@ public class ApplicationMaster {
     Options opts = new Options();
     opts.addOption("app_attempt_id", true,
         "App Attempt ID. Not to be used unless for testing purposes");
-    opts.addOption("shell_script", true,
-        "Location of the shell script to be executed");
     opts.addOption("shell_env", true,
         "Environment for shell script. Specified as env_key=env_val pairs");
     opts.addOption("container_memory", true,
@@ -387,11 +389,15 @@ public class ApplicationMaster {
         + appAttemptID.getApplicationId().getClusterTimestamp()
         + ", attemptId=" + appAttemptID.getAttemptId());
 
-    if (!fileExist(shellCommandPath)) {
+    if (!fileExist(shellCommandPath)
+        && envs.get(DSConstants.DISTRIBUTEDSHELLSCRIPTLOCATION).isEmpty()) {
       throw new IllegalArgumentException(
-          "No shell command specified to be executed by application master");
+          "No shell command or shell script specified to be executed by application master");
+    }
+
+    if (fileExist(shellCommandPath)) {
+      shellCommand = readContent(shellCommandPath);
     }
-    shellCommand = readContent(shellCommandPath);
 
     if (fileExist(shellArgsPath)) {
       shellArgs = readContent(shellArgsPath);
@@ -847,7 +853,9 @@ public class ApplicationMaster {
         }
         shellRsrc.setTimestamp(shellScriptPathTimestamp);
         shellRsrc.setSize(shellScriptPathLen);
-        localResources.put(ExecShellStringPath, shellRsrc);
+        localResources.put(Shell.WINDOWS ? ExecBatScripStringtPath :
+            ExecShellStringPath, shellRsrc);
+        shellCommand = Shell.WINDOWS ? windows_command : linux_bash_command;
       }
       ctx.setLocalResources(localResources);
 
@@ -858,7 +866,8 @@ public class ApplicationMaster {
       vargs.add(shellCommand);
       // Set shell script path
       if (!shellScriptPath.isEmpty()) {
-        vargs.add(ExecShellStringPath);
+        vargs.add(Shell.WINDOWS ? ExecBatScripStringtPath
+            : ExecShellStringPath);
       }
 
       // Set args for the shell command if any

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java Mon Dec 16 20:08:58 2013
@@ -49,6 +49,7 @@ import org.apache.hadoop.io.DataOutputBu
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -167,11 +168,14 @@ public class Client {
   // Command line options
   private Options opts;
 
-  private final String shellCommandPath = "shellCommands";
-  private final String shellArgsPath = "shellArgs";
-  private final String appMasterJarPath = "AppMaster.jar";
+  private static final String shellCommandPath = "shellCommands";
+  private static final String shellArgsPath = "shellArgs";
+  private static final String appMasterJarPath = "AppMaster.jar";
   // Hardcoded path to custom log_properties
-  private final String log4jPath = "log4j.properties";
+  private static final String log4jPath = "log4j.properties";
+
+  private static final String linuxShellPath = "ExecShellScript.sh";
+  private static final String windowBatPath = "ExecBatScript.bat";
 
   /**
    * @param args Command line arguments 
@@ -225,8 +229,11 @@ public class Client {
     opts.addOption("master_memory", true, "Amount of memory in MB to be requested to run the application master");
     opts.addOption("master_vcores", true, "Amount of virtual cores to be requested to run the application master");
     opts.addOption("jar", true, "Jar file containing the application master");
-    opts.addOption("shell_command", true, "Shell command to be executed by the Application Master");
-    opts.addOption("shell_script", true, "Location of the shell script to be executed");
+    opts.addOption("shell_command", true, "Shell command to be executed by " +
+        "the Application Master. Can only specify either --shell_command " +
+        "or --shell_script");
+    opts.addOption("shell_script", true, "Location of the shell script to be " +
+        "executed. Can only specify either --shell_command or --shell_script");
     opts.addOption("shell_args", true, "Command line args for the shell script." +
         "Multiple args can be separated by empty space.");
     opts.getOption("shell_args").setArgs(Option.UNLIMITED_VALUES);
@@ -308,12 +315,15 @@ public class Client {
 
     appMasterJar = cliParser.getOptionValue("jar");
 
-    if (!cliParser.hasOption("shell_command")) {
-      throw new IllegalArgumentException("No shell command specified to be executed by application master");
-    }
-    shellCommand = cliParser.getOptionValue("shell_command");
-
-    if (cliParser.hasOption("shell_script")) {
+    if (!cliParser.hasOption("shell_command") && !cliParser.hasOption("shell_script")) {
+      throw new IllegalArgumentException(
+          "No shell command or shell script specified to be executed by application master");
+    } else if (cliParser.hasOption("shell_command") && cliParser.hasOption("shell_script")) {
+      throw new IllegalArgumentException("Can not specify shell_command option " +
+          "and shell_script option at the same time");
+    } else if (cliParser.hasOption("shell_command")) {
+      shellCommand = cliParser.getOptionValue("shell_command");
+    } else {
       shellScriptPath = cliParser.getOptionValue("shell_script");
     }
     if (cliParser.hasOption("shell_args")) {
@@ -466,8 +476,11 @@ public class Client {
     long hdfsShellScriptTimestamp = 0;
     if (!shellScriptPath.isEmpty()) {
       Path shellSrc = new Path(shellScriptPath);
-      String shellPathSuffix = appName + "/" + appId.getId() + "/ExecShellScript.sh";
-      Path shellDst = new Path(fs.getHomeDirectory(), shellPathSuffix);
+      String shellPathSuffix =
+          appName + "/" + appId.getId() + "/"
+              + (Shell.WINDOWS ? windowBatPath : linuxShellPath);
+      Path shellDst =
+          new Path(fs.getHomeDirectory(), shellPathSuffix);
       fs.copyFromLocalFile(false, true, shellSrc, shellDst);
       hdfsShellScriptLocation = shellDst.toUri().toString(); 
       FileStatus shellFileStatus = fs.getFileStatus(shellDst);

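For reference, a minimal sketch of driving the reworked Client: exactly one of --shell_command or --shell_script may be given, otherwise init throws the IllegalArgumentException added above. The jar and script paths are placeholders and exception handling is omitted:

    String[] args = {
        "--jar", "/path/to/AppMaster.jar",
        "--num_containers", "1",
        "--shell_script", "/tmp/custom_script.sh",  // or "--shell_command", "ls", but not both
        "--master_memory", "512",
        "--container_memory", "128"
    };
    Client client = new Client(new YarnConfiguration());
    if (client.init(args)) {
      boolean result = client.run();  // submits the app and waits for completion
    }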
Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java Mon Dec 16 20:08:58 2013
@@ -304,6 +304,54 @@ public class TestDistributedShell {
   }
 
   @Test(timeout=90000)
+  public void testDSShellWithShellScript() throws Exception {
+    final File basedir =
+        new File("target", TestDistributedShell.class.getName());
+    final File tmpDir = new File(basedir, "tmpDir");
+    tmpDir.mkdirs();
+    final File customShellScript = new File(tmpDir, "custom_script.sh");
+    if (customShellScript.exists()) {
+      customShellScript.delete();
+    }
+    if (!customShellScript.createNewFile()) {
+      Assert.fail("Can not create custom shell script file.");
+    }
+    PrintWriter fileWriter = new PrintWriter(customShellScript);
+    // set the output to DEBUG level
+    fileWriter.write("echo testDSShellWithShellScript");
+    fileWriter.close();
+    System.out.println(customShellScript.getAbsolutePath());
+    String[] args = {
+        "--jar",
+        APPMASTER_JAR,
+        "--num_containers",
+        "1",
+        "--shell_script",
+        customShellScript.getAbsolutePath(),
+        "--master_memory",
+        "512",
+        "--master_vcores",
+        "2",
+        "--container_memory",
+        "128",
+        "--container_vcores",
+        "1"
+    };
+
+    LOG.info("Initializing DS Client");
+    final Client client =
+        new Client(new Configuration(yarnCluster.getConfig()));
+    boolean initSuccess = client.init(args);
+    Assert.assertTrue(initSuccess);
+    LOG.info("Running DS Client");
+    boolean result = client.run();
+    LOG.info("Client run completed. Result=" + result);
+    List<String> expectedContent = new ArrayList<String>();
+    expectedContent.add("testDSShellWithShellScript");
+    verifyContainerLog(1, expectedContent, false, "");
+  }
+
+  @Test(timeout=90000)
   public void testDSShellWithInvalidArgs() throws Exception {
     Client client = new Client(new Configuration(yarnCluster.getConfig()));
 
@@ -399,6 +447,58 @@ public class TestDistributedShell {
       Assert.assertTrue("The throw exception is not expected",
           e.getMessage().contains("Invalid virtual cores specified"));
     }
+
+    LOG.info("Initializing DS Client with --shell_command and --shell_script");
+    try {
+      String[] args = {
+          "--jar",
+          APPMASTER_JAR,
+          "--num_containers",
+          "2",
+          "--shell_command",
+          Shell.WINDOWS ? "dir" : "ls",
+          "--master_memory",
+          "512",
+          "--master_vcores",
+          "2",
+          "--container_memory",
+          "128",
+          "--container_vcores",
+          "1",
+          "--shell_script",
+          "test.sh"
+      };
+      client.init(args);
+      Assert.fail("Exception is expected");
+    } catch (IllegalArgumentException e) {
+      Assert.assertTrue("The throw exception is not expected",
+          e.getMessage().contains("Can not specify shell_command option " +
+          "and shell_script option at the same time"));
+    }
+
+    LOG.info("Initializing DS Client without --shell_command and --shell_script");
+    try {
+      String[] args = {
+          "--jar",
+          APPMASTER_JAR,
+          "--num_containers",
+          "2",
+          "--master_memory",
+          "512",
+          "--master_vcores",
+          "2",
+          "--container_memory",
+          "128",
+          "--container_vcores",
+          "1"
+      };
+      client.init(args);
+      Assert.fail("Exception is expected");
+    } catch (IllegalArgumentException e) {
+      Assert.assertTrue("The throw exception is not expected",
+          e.getMessage().contains("No shell command or shell script specified " +
+          "to be executed by application master"));
+    }
   }
 
   protected static void waitForNMToRegister(NodeManager nm)
@@ -490,10 +590,10 @@ public class TestDistributedShell {
       for (File output : containerFiles[i].listFiles()) {
         if (output.getName().trim().contains("stdout")) {
           BufferedReader br = null;
+          List<String> stdOutContent = new ArrayList<String>();
           try {
 
             String sCurrentLine;
-
             br = new BufferedReader(new FileReader(output));
             int numOfline = 0;
             while ((sCurrentLine = br.readLine()) != null) {
@@ -502,12 +602,25 @@ public class TestDistributedShell {
                   numOfWords++;
                 }
               } else if (output.getName().trim().equals("stdout")){
-                Assert.assertEquals("The current is" + sCurrentLine,
-                    expectedContent.get(numOfline), sCurrentLine.trim());
-                numOfline++;
+                if (! Shell.WINDOWS) {
+                  Assert.assertEquals("The current is" + sCurrentLine,
+                      expectedContent.get(numOfline), sCurrentLine.trim());
+                  numOfline++;
+                } else {
+                  stdOutContent.add(sCurrentLine.trim());
+                }
               }
             }
-
+            /* By executing bat script using cmd /c,
+             * it will output all contents from bat script first
+             * It is hard for us to do check line by line
+             * Simply check whether output from bat file contains
+             * all the expected messages
+             */
+            if (Shell.WINDOWS && !count
+                && output.getName().trim().equals("stdout")) {
+              Assert.assertTrue(stdOutContent.containsAll(expectedContent));
+            }
           } catch (IOException e) {
             e.printStackTrace();
           } finally {
@@ -523,6 +636,5 @@ public class TestDistributedShell {
     }
     return numOfWords;
   }
-
 }
 

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java Mon Dec 16 20:08:58 2013
@@ -53,6 +53,7 @@ import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.file.tfile.TFile;
@@ -294,7 +295,7 @@ public class AggregatedLogFormat {
       out.close();
     }
 
-    public void closeWriter() {
+    public void close() {
       try {
         this.writer.close();
       } catch (IOException e) {
@@ -569,9 +570,8 @@ public class AggregatedLogFormat {
       out.println("");
     }
 
-    public void close() throws IOException {
-      this.scanner.close();
-      this.fsDataIStream.close();
+    public void close() {
+      IOUtils.cleanup(LOG, scanner, reader, fsDataIStream);
     }
   }
 

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java Mon Dec 16 20:08:58 2013
@@ -59,109 +59,113 @@ public class AggregatedLogsBlock extends
 
   @Override
   protected void render(Block html) {
-    ContainerId containerId = verifyAndGetContainerId(html);
-    NodeId nodeId = verifyAndGetNodeId(html);
-    String appOwner = verifyAndGetAppOwner(html);
-    LogLimits logLimits = verifyAndGetLogLimits(html);
-    if (containerId == null || nodeId == null || appOwner == null
-        || appOwner.isEmpty() || logLimits == null) {
-      return;
-    }
-    
-    ApplicationId applicationId =
-        containerId.getApplicationAttemptId().getApplicationId();
-    String logEntity = $(ENTITY_STRING);
-    if (logEntity == null || logEntity.isEmpty()) {
-      logEntity = containerId.toString();
-    }
-
-    if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
-        YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
-      html.h1()
-          ._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
-          ._();
-      return;
-    }
-    
-    Path remoteRootLogDir =
-        new Path(conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
-            YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
     AggregatedLogFormat.LogReader reader = null;
     try {
-      reader =
-          new AggregatedLogFormat.LogReader(conf,
-              LogAggregationUtils.getRemoteNodeLogFileForApp(
-                  remoteRootLogDir, applicationId, appOwner, nodeId,
-                  LogAggregationUtils.getRemoteNodeLogDirSuffix(conf)));
-    } catch (FileNotFoundException e) {
-      // ACLs not available till the log file is opened.
-      html.h1()
-          ._("Logs not available for "
-              + logEntity
-              + ". Aggregation may not be complete, "
-              + "Check back later or try the nodemanager at "
-              + nodeId)._();
-      return;
-    } catch (IOException e) {
-      html.h1()._("Error getting logs for " + logEntity)._();
-      LOG.error("Error getting logs for " + logEntity, e);
-      return;
-    }
+      ContainerId containerId = verifyAndGetContainerId(html);
+      NodeId nodeId = verifyAndGetNodeId(html);
+      String appOwner = verifyAndGetAppOwner(html);
+      LogLimits logLimits = verifyAndGetLogLimits(html);
+      if (containerId == null || nodeId == null || appOwner == null
+          || appOwner.isEmpty() || logLimits == null) {
+        return;
+      }
 
-    String owner = null;
-    Map<ApplicationAccessType, String> appAcls = null;
-    try {
-      owner = reader.getApplicationOwner();
-      appAcls = reader.getApplicationAcls();
-    } catch (IOException e) {
-      html.h1()._("Error getting logs for " + logEntity)._();
-      LOG.error("Error getting logs for " + logEntity, e);
-      return;
-    }
-    ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf);
-    aclsManager.addApplication(applicationId, appAcls);
+      ApplicationId applicationId = containerId.getApplicationAttemptId()
+          .getApplicationId();
+      String logEntity = $(ENTITY_STRING);
+      if (logEntity == null || logEntity.isEmpty()) {
+        logEntity = containerId.toString();
+      }
 
-    String remoteUser = request().getRemoteUser();
-    UserGroupInformation callerUGI = null;
-    if (remoteUser != null) {
-      callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
-    }
-    if (callerUGI != null
-        && !aclsManager.checkAccess(callerUGI, ApplicationAccessType.VIEW_APP,
-            owner, applicationId)) {
-      html.h1()
-          ._("User [" + remoteUser
-              + "] is not authorized to view the logs for " + logEntity)._();
-      return;
-    }
+      if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+          YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
+        html.h1()
+            ._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
+            ._();
+        return;
+      }
 
-    String desiredLogType = $(CONTAINER_LOG_TYPE);
-    try {
-      AggregatedLogFormat.ContainerLogsReader logReader =
-          reader.getContainerLogsReader(containerId);
-      if (logReader == null) {
-        html.h1()._(
-            "Logs not available for " + logEntity
-                + ". Could be caused by the rentention policy")._();
+      Path remoteRootLogDir = new Path(conf.get(
+          YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
+          YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
+
+      try {
+        reader = new AggregatedLogFormat.LogReader(conf,
+            LogAggregationUtils.getRemoteNodeLogFileForApp(remoteRootLogDir,
+                applicationId, appOwner, nodeId,
+                LogAggregationUtils.getRemoteNodeLogDirSuffix(conf)));
+      } catch (FileNotFoundException e) {
+        // ACLs not available till the log file is opened.
+        html.h1()
+            ._("Logs not available for " + logEntity
+                + ". Aggregation may not be complete, "
+                + "Check back later or try the nodemanager at " + nodeId)._();
+        return;
+      } catch (IOException e) {
+        html.h1()._("Error getting logs for " + logEntity)._();
+        LOG.error("Error getting logs for " + logEntity, e);
         return;
       }
 
-      boolean foundLog = readContainerLogs(html, logReader, logLimits,
-          desiredLogType);
+      String owner = null;
+      Map<ApplicationAccessType, String> appAcls = null;
+      try {
+        owner = reader.getApplicationOwner();
+        appAcls = reader.getApplicationAcls();
+      } catch (IOException e) {
+        html.h1()._("Error getting logs for " + logEntity)._();
+        LOG.error("Error getting logs for " + logEntity, e);
+        return;
+      }
+      ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf);
+      aclsManager.addApplication(applicationId, appAcls);
 
-      if (!foundLog) {
-        if (desiredLogType.isEmpty()) {
-          html.h1("No logs available for container " + containerId.toString());
-        } else {
-          html.h1("Unable to locate '" + desiredLogType
-              + "' log for container " + containerId.toString());
+      String remoteUser = request().getRemoteUser();
+      UserGroupInformation callerUGI = null;
+      if (remoteUser != null) {
+        callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+      }
+      if (callerUGI != null
+          && !aclsManager.checkAccess(callerUGI,
+              ApplicationAccessType.VIEW_APP, owner, applicationId)) {
+        html.h1()
+            ._("User [" + remoteUser
+                + "] is not authorized to view the logs for " + logEntity)._();
+        return;
+      }
+
+      String desiredLogType = $(CONTAINER_LOG_TYPE);
+      try {
+        AggregatedLogFormat.ContainerLogsReader logReader = reader
+            .getContainerLogsReader(containerId);
+        if (logReader == null) {
+          html.h1()
+              ._("Logs not available for " + logEntity
+                  + ". Could be caused by the rentention policy")._();
+          return;
+        }
+
+        boolean foundLog = readContainerLogs(html, logReader, logLimits,
+            desiredLogType);
+
+        if (!foundLog) {
+          if (desiredLogType.isEmpty()) {
+            html.h1("No logs available for container " + containerId.toString());
+          } else {
+            html.h1("Unable to locate '" + desiredLogType
+                + "' log for container " + containerId.toString());
+          }
+          return;
         }
+      } catch (IOException e) {
+        html.h1()._("Error getting logs for " + logEntity)._();
+        LOG.error("Error getting logs for " + logEntity, e);
         return;
       }
-    } catch (IOException e) {
-      html.h1()._("Error getting logs for " + logEntity)._();
-      LOG.error("Error getting logs for " + logEntity, e);
-      return;
+    } finally {
+      if (reader != null) {
+        reader.close();
+      }
     }
   }
 

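The AggregatedLogsBlock rewrite above is mostly re-indentation; the substance of the YARN-1145 fix is that the reader is now opened inside a try block whose finally clause always closes it. Stripped to its shape:

    AggregatedLogFormat.LogReader reader = null;
    try {
      // validate request parameters, open the reader, render the logs ...
    } finally {
      if (reader != null) {
        reader.close();  // close() now delegates to IOUtils.cleanup and no longer throws
      }
    }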
Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java Mon Dec 16 20:08:58 2013
@@ -39,6 +39,7 @@ public class TestHAUtil {
   private static final String RM1_ADDRESS_UNTRIMMED = "  \t\t\n 1.2.3.4:8021  \n\t ";
   private static final String RM1_ADDRESS = RM1_ADDRESS_UNTRIMMED.trim();
   private static final String RM2_ADDRESS = "localhost:8022";
+  private static final String RM3_ADDRESS = "localhost:8033";
   private static final String RM1_NODE_ID_UNTRIMMED = "rm1 ";
   private static final String RM1_NODE_ID = RM1_NODE_ID_UNTRIMMED.trim();
   private static final String RM2_NODE_ID = "rm2";
@@ -113,8 +114,13 @@ public class TestHAUtil {
     }
 
     conf.clear();
-    conf.set(YarnConfiguration.RM_HA_IDS, RM_INVALID_NODE_ID + ","
-        + RM1_NODE_ID);
+    // simulate the case YarnConfiguration.RM_HA_ID is not set
+    conf.set(YarnConfiguration.RM_HA_IDS, RM1_NODE_ID + ","
+        + RM2_NODE_ID);
+    for (String confKey : YarnConfiguration.RM_SERVICES_ADDRESS_CONF_KEYS) {
+      conf.set(HAUtil.addSuffix(confKey, RM1_NODE_ID), RM1_ADDRESS);
+      conf.set(HAUtil.addSuffix(confKey, RM2_NODE_ID), RM2_ADDRESS);
+    }
     try {
       HAUtil.verifyAndSetConfiguration(conf);
     } catch (YarnRuntimeException e) {
@@ -165,6 +171,7 @@ public class TestHAUtil {
     for (String confKey : YarnConfiguration.RM_SERVICES_ADDRESS_CONF_KEYS) {
       conf.set(HAUtil.addSuffix(confKey, RM1_NODE_ID), RM1_ADDRESS_UNTRIMMED);
       conf.set(HAUtil.addSuffix(confKey, RM2_NODE_ID), RM2_ADDRESS);
+      conf.set(HAUtil.addSuffix(confKey, RM3_NODE_ID), RM3_ADDRESS);
     }
     try {
       HAUtil.verifyAndSetConfiguration(conf);

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java Mon Dec 16 20:08:58 2013
@@ -114,7 +114,7 @@ public class TestAggregatedLogFormat {
             testContainerId, ugi.getShortUserName());
 
     logWriter.append(logKey, logValue);
-    logWriter.closeWriter();
+    logWriter.close();
 
     // make sure permission are correct on the file
     FileStatus fsStatus =  fs.getFileStatus(remoteAppLogFile);
@@ -194,7 +194,7 @@ public class TestAggregatedLogFormat {
         ugi.getShortUserName());
     logWriter.append(logKey, logValue);
 
-    logWriter.closeWriter();
+    logWriter.close();
     
     BufferedReader in =
         new BufferedReader(new FileReader(new File(remoteAppLogFile

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java Mon Dec 16 20:08:58 2013
@@ -229,7 +229,7 @@ public class TestAggregatedLogsBlock {
 
     writer.append(new AggregatedLogFormat.LogKey("container_0_0001_01_000001"),
         new AggregatedLogFormat.LogValue(rootLogDirs, containerId,UserGroupInformation.getCurrentUser().getShortUserName()));
-    writer.closeWriter();
+    writer.close();
   }
 
   private void writeLogs(String dirName) throws Exception {

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java Mon Dec 16 20:08:58 2013
@@ -178,7 +178,7 @@ public class AppLogAggregatorImpl implem
         localAppLogDirs);
 
     if (this.writer != null) {
-      this.writer.closeWriter();
+      this.writer.close();
       LOG.info("Finished aggregate log-file for app " + this.applicationId);
     }
 

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java Mon Dec 16 20:08:58 2013
@@ -143,7 +143,9 @@ public class TestZKRMStateStore extends 
     conf.set(YarnConfiguration.ZK_RM_STATE_STORE_ADDRESS, hostPort);
     conf.set(YarnConfiguration.RM_HA_ID, rmId);
     for (String rpcAddress : YarnConfiguration.RM_SERVICES_ADDRESS_CONF_KEYS) {
-      conf.set(HAUtil.addSuffix(rpcAddress, rmId), "localhost:0");
+      for (String id : HAUtil.getRMHAIds(conf)) {
+        conf.set(HAUtil.addSuffix(rpcAddress, id), "localhost:0");
+      }
     }
     conf.set(HAUtil.addSuffix(YarnConfiguration.RM_ADMIN_ADDRESS, rmId),
         "localhost:" + adminPort);

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java Mon Dec 16 20:08:58 2013
@@ -296,7 +296,9 @@ public class MiniYARNCluster extends Com
       String hostname = MiniYARNCluster.getHostname();
       conf.set(YarnConfiguration.RM_HA_ID, rmId);
       for (String confKey : YarnConfiguration.RM_SERVICES_ADDRESS_CONF_KEYS) {
-        conf.set(HAUtil.addSuffix(confKey, rmId), hostname + ":0");
+        for (String id : HAUtil.getRMHAIds(conf)) {
+          conf.set(HAUtil.addSuffix(confKey, id), hostname + ":0");
+        }
       }
     }
 

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java Mon Dec 16 20:08:58 2013
@@ -33,6 +33,8 @@ import org.apache.hadoop.yarn.exceptions
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 
+import com.google.common.annotations.VisibleForTesting;
+
 public class WebAppProxy extends AbstractService {
   public static final String FETCHER_ATTRIBUTE= "AppUrlFetcher";
   public static final String IS_SECURITY_ENABLED_ATTRIBUTE = "IsSecurityEnabled";
@@ -126,4 +128,9 @@ public class WebAppProxy extends Abstrac
       }
     }
   }
+
+  @VisibleForTesting
+  String getBindAddress() {
+    return bindAddress + ":" + port;
+  }
 }

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java Mon Dec 16 20:08:58 2013
@@ -77,7 +77,8 @@ public class WebAppProxyServer extends C
     Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
     StringUtils.startupShutdownMessage(WebAppProxyServer.class, args, LOG);
     try {
-      WebAppProxyServer proxyServer = startServer();
+      YarnConfiguration configuration = new YarnConfiguration();
+      WebAppProxyServer proxyServer = startServer(configuration);
       proxyServer.proxy.join();
     } catch (Throwable t) {
       LOG.fatal("Error starting Proxy server", t);
@@ -90,12 +91,11 @@ public class WebAppProxyServer extends C
    * 
    * @return proxy server instance.
    */
-  protected static WebAppProxyServer startServer() throws Exception {
+  protected static WebAppProxyServer startServer(Configuration configuration)
+      throws Exception {
     WebAppProxyServer proxy = new WebAppProxyServer();
     ShutdownHookManager.get().addShutdownHook(
         new CompositeServiceShutdownHook(proxy), SHUTDOWN_HOOK_PRIORITY);
-    YarnConfiguration configuration = new YarnConfiguration();
-    configuration.set(YarnConfiguration.PROXY_ADDRESS, "localhost:9099");
     proxy.init(configuration);
     proxy.start();
     return proxy;

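Since startServer no longer builds its own YarnConfiguration (and no longer hardcodes the bind address), callers pass one in. A minimal sketch of the new call, mirroring the updated TestWebAppProxyServlet below; the address is just an example, and the call must come from the org.apache.hadoop.yarn.server.webproxy package because startServer is protected:

    YarnConfiguration conf = new YarnConfiguration();
    conf.set(YarnConfiguration.PROXY_ADDRESS, "localhost:9099");
    WebAppProxyServer proxyServer = WebAppProxyServer.startServer(conf);
    // ... use the proxy ...
    proxyServer.stop();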
Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServer.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServer.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServer.java Mon Dec 16 20:08:58 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.we
 
 import static org.junit.Assert.assertEquals;
 
+import org.apache.hadoop.service.Service;
 import org.apache.hadoop.service.Service.STATE;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer;
@@ -29,11 +30,12 @@ import org.junit.Test;
 
 public class TestWebAppProxyServer {
   private WebAppProxyServer webAppProxy = null;
+  private final String proxyAddress = "0.0.0.0:8888";
 
   @Before
   public void setUp() throws Exception {
     YarnConfiguration conf = new YarnConfiguration();
-    conf.set(YarnConfiguration.PROXY_ADDRESS, "0.0.0.0:8888");
+    conf.set(YarnConfiguration.PROXY_ADDRESS, proxyAddress);
     webAppProxy = new WebAppProxyServer();
     webAppProxy.init(conf);
   }
@@ -47,6 +49,11 @@ public class TestWebAppProxyServer {
   public void testStart() {
     assertEquals(STATE.INITED, webAppProxy.getServiceState());
     webAppProxy.start();
+    for (Service service : webAppProxy.getServices()) {
+      if (service instanceof WebAppProxy) {
+        assertEquals(((WebAppProxy) service).getBindAddress(), proxyAddress);
+      }
+    }
     assertEquals(STATE.STARTED, webAppProxy.getServiceState());
   }
 }

Modified: hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java?rev=1551332&r1=1551331&r2=1551332&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java Mon Dec 16 20:08:58 2013
@@ -184,8 +184,10 @@ public class TestWebAppProxyServlet {
   @Test(timeout=5000)
   public void testWebAppProxyServerMainMethod() throws Exception {
     WebAppProxyServer mainServer = null;
+    Configuration conf = new YarnConfiguration();
+    conf.set(YarnConfiguration.PROXY_ADDRESS, "localhost:9099");
     try {
-      mainServer  = WebAppProxyServer.startServer();
+      mainServer  = WebAppProxyServer.startServer(conf);
       int counter = 20;
 
       URL wrongUrl = new URL("http://localhost:9099/proxy/app");