Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2015/04/24 02:48:18 UTC

[1/4] hadoop git commit: YARN-3351. AppMaster tracking URL is broken in HA. (Anubhav Dhoot via kasha)

Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 ecab7d226 -> 0eae3bc0f


YARN-3351. AppMaster tracking URL is broken in HA. (Anubhav Dhoot via kasha)

(cherry picked from commit 20b49224eb90c796f042ac4251508f3979fd4787)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e1843c9a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e1843c9a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e1843c9a

Branch: refs/heads/branch-2.7
Commit: e1843c9a46e77a66e121126fdf195013b7ccb76f
Parents: ecab7d2
Author: Karthik Kambatla <ka...@apache.org>
Authored: Wed Mar 18 16:30:33 2015 -0700
Committer: Vinod Kumar Vavilapalli <vi...@apache.org>
Committed: Thu Apr 23 17:39:50 2015 -0700

----------------------------------------------------------------------
 hadoop-yarn-project/CHANGES.txt                 |  3 +
 .../hadoop/yarn/webapp/util/WebAppUtils.java    | 31 +++++++-
 .../hadoop/yarn/util/TestWebAppUtils.java       | 81 ++++++++++++++++++++
 .../server/nodemanager/webapp/NavBlock.java     |  2 +-
 4 files changed, 115 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1843c9a/hadoop-yarn-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 823102c..adc5b7a 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -11,6 +11,7 @@ Release 2.7.1 - UNRELEASED
   OPTIMIZATIONS
 
   BUG FIXES
+
     YARN-3487. CapacityScheduler scheduler lock obtained unnecessarily when 
     calling getQueue (Jason Lowe via wangda)
 
@@ -26,6 +27,8 @@ Release 2.7.1 - UNRELEASED
     YARN-3522. Fixed DistributedShell to instantiate TimeLineClient as the
     correct user. (Zhijie Shen via jianhe)
 
+    YARN-3351. AppMaster tracking URL is broken in HA. (Anubhav Dhoot via kasha)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1843c9a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java
index 3aeb33e..459c110 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java
@@ -130,11 +130,23 @@ public class WebAppUtils {
     return addr;
   }
 
+  public static String getResolvedRemoteRMWebAppURLWithScheme(
+      Configuration conf) {
+    return getHttpSchemePrefix(conf)
+        + getResolvedRemoteRMWebAppURLWithoutScheme(conf);
+  }
+
   public static String getResolvedRMWebAppURLWithScheme(Configuration conf) {
     return getHttpSchemePrefix(conf)
         + getResolvedRMWebAppURLWithoutScheme(conf);
   }
   
+  public static String getResolvedRemoteRMWebAppURLWithoutScheme(
+      Configuration conf) {
+    return getResolvedRemoteRMWebAppURLWithoutScheme(conf,
+        YarnConfiguration.useHttps(conf) ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY);
+  }
+
   public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf) {
     return getResolvedRMWebAppURLWithoutScheme(conf,
         YarnConfiguration.useHttps(conf) ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY);
@@ -143,6 +155,23 @@ public class WebAppUtils {
   public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf,
       Policy httpPolicy) {
     InetSocketAddress address = null;
+    if (httpPolicy == Policy.HTTPS_ONLY) {
+      address =
+          conf.getSocketAddr(YarnConfiguration.RM_WEBAPP_HTTPS_ADDRESS,
+              YarnConfiguration.DEFAULT_RM_WEBAPP_HTTPS_ADDRESS,
+              YarnConfiguration.DEFAULT_RM_WEBAPP_HTTPS_PORT);
+    } else {
+      address =
+          conf.getSocketAddr(YarnConfiguration.RM_WEBAPP_ADDRESS,
+              YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS,
+              YarnConfiguration.DEFAULT_RM_WEBAPP_PORT);      
+    }
+    return getResolvedAddress(address);
+  }
+
+  public static String getResolvedRemoteRMWebAppURLWithoutScheme(Configuration conf,
+      Policy httpPolicy) {
+    InetSocketAddress address = null;
     String rmId = null;
     if (HAUtil.isHAEnabled(conf)) {
       // If HA enabled, pick one of the RM-IDs and rely on redirect to go to
@@ -167,7 +196,7 @@ public class WebAppUtils {
                   : HAUtil.addSuffix(
                   YarnConfiguration.RM_WEBAPP_ADDRESS, rmId),
               YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS,
-              YarnConfiguration.DEFAULT_RM_WEBAPP_PORT);      
+              YarnConfiguration.DEFAULT_RM_WEBAPP_PORT);
     }
     return getResolvedAddress(address);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1843c9a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWebAppUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWebAppUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWebAppUtils.java
new file mode 100644
index 0000000..169787f
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWebAppUtils.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.net.UnknownHostException;
+import java.util.HashMap;
+import java.util.Map;
+
+public class TestWebAppUtils {
+
+  private static final String RM1_NODE_ID = "rm1";
+  private static final String RM2_NODE_ID = "rm2";
+
+  // Because WebAppUtils#getResolvedAddress tries to resolve the hostname, we add a static mapping for dummy hostnames
+  // to make this test run anywhere without having to give some resolvable hostnames
+  private static String dummyHostNames[] = {"host1", "host2", "host3"};
+  private static final String anyIpAddress = "1.2.3.4";
+  private static Map<String, String> savedStaticResolution = new HashMap<>();
+
+  @BeforeClass
+  public static void initializeDummyHostnameResolution() throws Exception {
+    String previousIpAddress;
+    for (String hostName : dummyHostNames) {
+      if (null != (previousIpAddress = NetUtils.getStaticResolution(hostName))) {
+        savedStaticResolution.put(hostName, previousIpAddress);
+      }
+      NetUtils.addStaticResolution(hostName, anyIpAddress);
+    }
+  }
+
+  @AfterClass
+  public static void restoreDummyHostnameResolution() throws Exception {
+    for (Map.Entry<String, String> hostnameToIpEntry : savedStaticResolution.entrySet()) {
+      NetUtils.addStaticResolution(hostnameToIpEntry.getKey(), hostnameToIpEntry.getValue());
+    }
+  }
+
+  @Test
+  public void TestRMWebAppURLRemoteAndLocal() throws UnknownHostException {
+    Configuration configuration = new Configuration();
+    final String rmAddress = "host1:8088";
+    configuration.set(YarnConfiguration.RM_WEBAPP_ADDRESS, rmAddress);
+    final String rm1Address = "host2:8088";
+    final String rm2Address = "host3:8088";
+    configuration.set(YarnConfiguration.RM_WEBAPP_ADDRESS + "." + RM1_NODE_ID, rm1Address);
+    configuration.set(YarnConfiguration.RM_WEBAPP_ADDRESS + "." + RM2_NODE_ID, rm2Address);
+    configuration.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
+    configuration.set(YarnConfiguration.RM_HA_IDS, RM1_NODE_ID + "," + RM2_NODE_ID);
+
+    String rmRemoteUrl = WebAppUtils.getResolvedRemoteRMWebAppURLWithoutScheme(configuration);
+    Assert.assertEquals("ResolvedRemoteRMWebAppUrl should resolve to the first HA RM address", rm1Address, rmRemoteUrl);
+
+    String rmLocalUrl = WebAppUtils.getResolvedRMWebAppURLWithoutScheme(configuration);
+    Assert.assertEquals("ResolvedRMWebAppUrl should resolve to the default RM webapp address", rmAddress, rmLocalUrl);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1843c9a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NavBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NavBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NavBlock.java
index 1c3b63b..b8a10f1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NavBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NavBlock.java
@@ -38,7 +38,7 @@ public class NavBlock extends HtmlBlock implements YarnWebParams {
   protected void render(Block html) {
 	
     String RMWebAppURL =
-        WebAppUtils.getResolvedRMWebAppURLWithScheme(this.conf);
+        WebAppUtils.getResolvedRemoteRMWebAppURLWithScheme(this.conf);
 	  html
       .div("#nav")
       .h3()._("ResourceManager")._()
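
For illustration, a minimal, self-contained sketch (not part of the commit) of how a non-RM daemon such as the NodeManager's NavBlock can now build a working RM web UI link under HA. The RM IDs and hostnames are placeholders, and WebAppUtils ultimately resolves the hostnames, so real code needs resolvable names (the new TestWebAppUtils above works around this with NetUtils static resolutions).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;

public class RMWebAppUrlSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
    conf.set(YarnConfiguration.RM_HA_IDS, "rm1,rm2");
    conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "rm-vip.example.com:8088");
    conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS + ".rm1", "rm1.example.com:8088");
    conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS + ".rm2", "rm2.example.com:8088");

    // New HA-aware helper from this patch: picks one RM-ID's webapp address and
    // relies on the standby RM redirecting to the active one.
    String remoteUrl = WebAppUtils.getResolvedRemoteRMWebAppURLWithScheme(conf);

    // Pre-existing helper: now resolves only the plain, un-suffixed
    // yarn.resourcemanager.webapp.address.
    String localUrl = WebAppUtils.getResolvedRMWebAppURLWithScheme(conf);

    System.out.println(remoteUrl + " vs " + localUrl);
  }
}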


[2/4] hadoop git commit: YARN-3382. Some of UserMetricsInfo metrics are incorrectly set to root queue metrics. Contributed by Rohit Agarwal

Posted by vi...@apache.org.
YARN-3382. Some of UserMetricsInfo metrics are incorrectly set to root queue metrics. Contributed by Rohit Agarwal

(cherry picked from commit 944a16579fdb54c1c420162ef830ef2ac5451c69)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6cc0e9ba
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6cc0e9ba
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6cc0e9ba

Branch: refs/heads/branch-2.7
Commit: 6cc0e9baa1041a9e08fe3497b35d1ea2b78c7db1
Parents: e1843c9
Author: Jian He <ji...@apache.org>
Authored: Mon Apr 13 10:53:34 2015 -0700
Committer: Vinod Kumar Vavilapalli <vi...@apache.org>
Committed: Thu Apr 23 17:40:56 2015 -0700

----------------------------------------------------------------------
 hadoop-yarn-project/CHANGES.txt                           |  3 +++
 .../resourcemanager/webapp/dao/UserMetricsInfo.java       | 10 +++++-----
 2 files changed, 8 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cc0e9ba/hadoop-yarn-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index adc5b7a..1594a6c 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -29,6 +29,9 @@ Release 2.7.1 - UNRELEASED
 
     YARN-3351. AppMaster tracking URL is broken in HA. (Anubhav Dhoot via kasha)
 
+    YARN-3382. Some of UserMetricsInfo metrics are incorrectly set to root
+    queue metrics. (Rohit Agarwal via jianhe)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cc0e9ba/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/UserMetricsInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/UserMetricsInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/UserMetricsInfo.java
index 73a83d7..bfa5bd2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/UserMetricsInfo.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/UserMetricsInfo.java
@@ -63,11 +63,11 @@ public class UserMetricsInfo {
       this.userMetricsAvailable = true;
 
       this.appsSubmitted = userMetrics.getAppsSubmitted();
-      this.appsCompleted = metrics.getAppsCompleted();
-      this.appsPending = metrics.getAppsPending();
-      this.appsRunning = metrics.getAppsRunning();
-      this.appsFailed = metrics.getAppsFailed();
-      this.appsKilled = metrics.getAppsKilled();
+      this.appsCompleted = userMetrics.getAppsCompleted();
+      this.appsPending = userMetrics.getAppsPending();
+      this.appsRunning = userMetrics.getAppsRunning();
+      this.appsFailed = userMetrics.getAppsFailed();
+      this.appsKilled = userMetrics.getAppsKilled();
 
       this.runningContainers = userMetrics.getAllocatedContainers();
       this.pendingContainers = userMetrics.getPendingContainers();
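
For context, a hedged sketch using only the accessors visible in this diff: the per-user counters must be read from the user-level QueueMetrics returned by getUserMetrics(user), not from the queue-level aggregate the DAO was previously reading. The wrapper class, method, and parameter names here are illustrative, not part of the patch.

import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;

public class UserAppCountSketch {
  // 'queueMetrics' stands for the queue-level QueueMetrics handed to the webapp DAO.
  static int runningAppsForUser(QueueMetrics queueMetrics, String user) {
    QueueMetrics userMetrics = queueMetrics.getUserMetrics(user);
    if (userMetrics == null) {
      return 0; // nothing recorded yet for this user
    }
    // Before this fix, counters like this one were read from 'queueMetrics' instead,
    // so every user appeared to own the queue-wide totals.
    return userMetrics.getAppsRunning();
  }
}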


[4/4] hadoop git commit: MAPREDUCE-6238. MR2 can't run local jobs with -libjars command options which is a regression from MR1 (zxu via rkanter)

Posted by vi...@apache.org.
MAPREDUCE-6238. MR2 can't run local jobs with -libjars command options which is a regression from MR1 (zxu via rkanter)

(cherry picked from commit d50e8f09287deeb51012d08e326a2ed71a6da869)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0eae3bc0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0eae3bc0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0eae3bc0

Branch: refs/heads/branch-2.7
Commit: 0eae3bc0f60b0261c834242c08e815b6ce2372f6
Parents: 5cf35b9
Author: Robert Kanter <rk...@apache.org>
Authored: Mon Apr 20 14:14:08 2015 -0700
Committer: Vinod Kumar Vavilapalli <vi...@apache.org>
Committed: Thu Apr 23 17:47:00 2015 -0700

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |  3 +
 .../mapred/LocalDistributedCacheManager.java    |  6 --
 .../hadoop/mapreduce/JobResourceUploader.java   |  2 +-
 .../hadoop/mapred/TestLocalJobSubmission.java   | 92 ++++++++++++++++++++
 4 files changed, 96 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0eae3bc0/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 82c3064..518b6e3 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -14,6 +14,9 @@ Release 2.7.1 - UNRELEASED
 
     MAPREDUCE-6300. Task list sort by task id broken. (Siqi Li via aajisaka)
 
+    MAPREDUCE-6238. MR2 can't run local jobs with -libjars command options
+    which is a regression from MR1 (zxu via rkanter)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0eae3bc0/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java
index 1055516..8606ede 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java
@@ -100,18 +100,12 @@ class LocalDistributedCacheManager {
     Path[] archiveClassPaths = DistributedCache.getArchiveClassPaths(conf);
     if (archiveClassPaths != null) {
       for (Path p : archiveClassPaths) {
-        FileSystem remoteFS = p.getFileSystem(conf);
-        p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(),
-            remoteFS.getWorkingDirectory()));
         classpaths.put(p.toUri().getPath().toString(), p);
       }
     }
     Path[] fileClassPaths = DistributedCache.getFileClassPaths(conf);
     if (fileClassPaths != null) {
       for (Path p : fileClassPaths) {
-        FileSystem remoteFS = p.getFileSystem(conf);
-        p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(),
-            remoteFS.getWorkingDirectory()));
         classpaths.put(p.toUri().getPath().toString(), p);
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0eae3bc0/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
index eebdf88..134de35 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java
@@ -127,7 +127,7 @@ class JobResourceUploader {
         Path tmp = new Path(tmpjars);
         Path newPath = copyRemoteFiles(libjarsDir, tmp, conf, replication);
         DistributedCache.addFileToClassPath(
-            new Path(newPath.toUri().getPath()), conf);
+            new Path(newPath.toUri().getPath()), conf, jtFs);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0eae3bc0/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java
new file mode 100644
index 0000000..e06551a
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.net.URL;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import org.apache.hadoop.mapreduce.SleepJob;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+/**
+ * check for the job submission options of
+ * -jt local -libjars
+ */
+public class TestLocalJobSubmission {
+  private static Path TEST_ROOT_DIR =
+      new Path(System.getProperty("test.build.data","/tmp"));
+
+  @Before
+  public void configure() throws Exception {
+  }
+
+  @After
+  public void cleanup() {
+  }
+
+  /**
+   * test the local job submission options of
+   * -jt local -libjars
+   * @throws IOException
+   */
+  @Test
+  public void testLocalJobLibjarsOption() throws IOException {
+    Path jarPath = makeJar(new Path(TEST_ROOT_DIR, "test.jar"));
+
+    Configuration conf = new Configuration();
+    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "hdfs://testcluster");
+    final String[] args = {
+        "-jt" , "local", "-libjars", jarPath.toString(),
+        "-m", "1", "-r", "1", "-mt", "1", "-rt", "1"
+    };
+    int res = -1;
+    try {
+      res = ToolRunner.run(conf, new SleepJob(), args);
+    } catch (Exception e) {
+      System.out.println("Job failed with " + e.getLocalizedMessage());
+      e.printStackTrace(System.out);
+      fail("Job failed");
+    }
+    assertEquals("dist job res is not 0:", 0, res);
+  }
+
+  private Path makeJar(Path p) throws IOException {
+    FileOutputStream fos = new FileOutputStream(new File(p.toString()));
+    JarOutputStream jos = new JarOutputStream(fos);
+    ZipEntry ze = new ZipEntry("test.jar.inside");
+    jos.putNextEntry(ze);
+    jos.write(("inside the jar!").getBytes());
+    jos.closeEntry();
+    jos.close();
+    return p;
+  }
+}
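
As a usage note, a minimal sketch (not from the patch) of the combination the new TestLocalJobSubmission exercises: with this fix a Tool-based driver can again be launched with the generic options "-jt local -libjars extra.jar" even when fs.defaultFS points at an HDFS cluster, because the -libjars classpath entries are no longer forced through the remote FileSystem. The class, job, and jar names below are illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class LibjarsLocalDriver extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    // getConf() already reflects -jt local and the -libjars entries that
    // ToolRunner/GenericOptionsParser parsed off the command line.
    Job job = Job.getInstance(getConf(), "libjars-local-sketch");
    job.setJarByClass(LibjarsLocalDriver.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    // Mapper/reducer setup elided; the default identity map/reduce is used.
    return job.waitForCompletion(true) ? 0 : 1;
  }

  public static void main(String[] args) throws Exception {
    // Illustrative invocation:
    //   hadoop jar driver.jar LibjarsLocalDriver -jt local -libjars extra.jar <in> <out>
    System.exit(ToolRunner.run(new Configuration(), new LibjarsLocalDriver(), args));
  }
}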


[3/4] hadoop git commit: YARN-3472. Fixed possible leak in DelegationTokenRenewer#allTokens. Contributed by Rohith Sharmaks

Posted by vi...@apache.org.
YARN-3472. Fixed possible leak in DelegationTokenRenewer#allTokens. Contributed by Rohith Sharmaks

(cherry picked from commit a1afbc48b53f6bdbd30dc8eb56a7621d49c5d6db)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5cf35b98
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5cf35b98
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5cf35b98

Branch: refs/heads/branch-2.7
Commit: 5cf35b98e1e751b28e5b2ddeb75b9be13cf98734
Parents: 6cc0e9b
Author: Jian He <ji...@apache.org>
Authored: Mon Apr 13 14:07:17 2015 -0700
Committer: Vinod Kumar Vavilapalli <vi...@apache.org>
Committed: Thu Apr 23 17:43:41 2015 -0700

----------------------------------------------------------------------
 hadoop-yarn-project/CHANGES.txt                          |  3 +++
 .../resourcemanager/security/DelegationTokenRenewer.java |  1 +
 .../security/TestDelegationTokenRenewer.java             | 11 ++++++++++-
 3 files changed, 14 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5cf35b98/hadoop-yarn-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 1594a6c..d9d8f5f 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -32,6 +32,9 @@ Release 2.7.1 - UNRELEASED
     YARN-3382. Some of UserMetricsInfo metrics are incorrectly set to root
     queue metrics. (Rohit Agarwal via jianhe)
 
+    YARN-3472. Fixed possible leak in DelegationTokenRenewer#allTokens.
+    (Rohith Sharmaks via jianhe)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5cf35b98/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java
index d49ecfc..e307645 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java
@@ -577,6 +577,7 @@ public class DelegationTokenRenewer extends AbstractService {
             DelegationTokenToRenew t = iter.next();
             if (t.token.getKind().equals(new Text("HDFS_DELEGATION_TOKEN"))) {
               iter.remove();
+              allTokens.remove(t.token);
               t.cancelTimer();
               LOG.info("Removed expiring token " + t);
             }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5cf35b98/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java
index bc9c295..f2c0a30 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java
@@ -906,7 +906,16 @@ public class TestDelegationTokenRenewer {
           new HashMap<ApplicationAccessType, String>(), false, "default", 1,
           credentials);
 
-    // wait for the initial expiring hdfs token to be removed.
+    // wait for the initial expiring hdfs token to be removed from allTokens
+    GenericTestUtils.waitFor(new Supplier<Boolean>() {
+      public Boolean get() {
+        return
+            rm.getRMContext().getDelegationTokenRenewer().getAllTokens()
+            .get(token1) == null;
+      }
+    }, 1000, 20000);
+
+    // wait for the initial expiring hdfs token to be removed from appTokens
     GenericTestUtils.waitFor(new Supplier<Boolean>() {
       public Boolean get() {
         return !rm.getRMContext().getDelegationTokenRenewer()