Posted to common-commits@hadoop.apache.org by ww...@apache.org on 2019/03/13 09:10:57 UTC

[hadoop] branch trunk updated: MAPREDUCE-7192. JobHistoryServer attempts page support jump to containers log page in NM when logAggregation is disable. Contributed by Jiandan Yang.

This is an automated email from the ASF dual-hosted git repository.

wwei pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 159a715  MAPREDUCE-7192. JobHistoryServer attempts page support jump to containers log page in NM when logAggregation is disable. Contributed by Jiandan Yang.
159a715 is described below

commit 159a715eef23046ee9909289defe62e68a5df533
Author: Weiwei Yang <ww...@apache.org>
AuthorDate: Wed Mar 13 17:00:35 2019 +0800

    MAPREDUCE-7192. JobHistoryServer attempts page support jump to containers log page in NM when logAggregation is disable. Contributed by Jiandan Yang.
---
 .../mapreduce/v2/hs/webapp/HsAttemptsPage.java     |  5 ++--
 .../hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java  | 28 ++++++++++++++------
 .../hadoop/mapreduce/v2/hs/webapp/TestBlocks.java  | 30 +++++++++++++++++++---
 3 files changed, 50 insertions(+), 13 deletions(-)
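
The gist of the patch is the logs-link selection added to AttemptsBlock in HsTaskPage.java: when yarn.log-aggregation-enable is false, the attempts page now links straight to the NodeManager's containerlogs page instead of the JobHistoryServer logs servlet. The standalone Java sketch below illustrates that decision only; it is not part of the patch, and the join() helper, class name, and sample IDs are stand-ins for the webapp's url() helper and real cluster values.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class LogsLinkSketch {

  // Stand-in for the webapp's url() helper: joins path parts as "/a/b/c".
  private static String join(String... parts) {
    return "/" + String.join("/", parts);
  }

  // Mirrors the branch added in AttemptsBlock: use the JobHistoryServer logs
  // servlet when log aggregation is enabled, otherwise point at the
  // NodeManager's containerlogs page on the node that ran the attempt.
  static String logsUrl(Configuration conf, String scheme, String nodeHttpAddr,
      String nodeId, String containerId, String attemptId, String user) {
    boolean aggregated = conf.getBoolean(
        YarnConfiguration.LOG_AGGREGATION_ENABLED,
        YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED);
    if (aggregated) {
      return join("logs", nodeId, containerId, attemptId, user);
    }
    return scheme + nodeHttpAddr
        + join("node", "containerlogs", containerId, user);
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false);
    // Sample values below are placeholders, not taken from a real cluster.
    System.out.println(logsUrl(conf, "http://", "nm-host:8042",
        "nm-host:45454", "container_0_0005_01_000001",
        "attempt_0_0001_r_000000_0", "User"));
  }
}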

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java
index 9233fd3..b5bf719 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java
@@ -26,6 +26,7 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
@@ -46,8 +47,8 @@ import com.google.inject.Inject;
 public class HsAttemptsPage extends HsTaskPage {
   static class FewAttemptsBlock extends HsTaskPage.AttemptsBlock {
     @Inject
-    FewAttemptsBlock(App ctx) {
-      super(ctx);
+    FewAttemptsBlock(App ctx, Configuration conf) {
+      super(ctx, conf);
     }
 
     /*
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
index 5b0c59f..65fd1ea 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
@@ -30,6 +30,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 import java.util.Collection;
 
 import org.apache.commons.text.StringEscapeUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -38,6 +39,7 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.MapTaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
@@ -61,10 +63,12 @@ public class HsTaskPage extends HsView {
    */
   static class AttemptsBlock extends HtmlBlock {
     final App app;
+    final Configuration conf;
 
     @Inject
-    AttemptsBlock(App ctx) {
+    AttemptsBlock(App ctx, Configuration conf) {
       app = ctx;
+      this.conf = conf;
     }
 
     @Override
@@ -152,13 +156,21 @@ public class HsTaskPage extends HsView {
               StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
 
         .append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>")
-        .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
-
-        .append("<a class='logslink' href='").append(url("logs", nodeIdString
-          , containerIdString, taid, app.getJob().getUserName()))
-          .append("'>logs</a>\",\"")
-
-          .append(attemptStartTime).append("\",\"");
+        .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"");
+
+         String logsUrl = url("logs", nodeIdString, containerIdString, taid,
+             app.getJob().getUserName());
+         if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+             YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
+           logsUrl =
+               url(MRWebAppUtil.getYARNWebappScheme(), nodeHttpAddr, "node",
+                   "containerlogs", containerIdString,
+                   app.getJob().getUserName());
+         }
+         attemptsTableData.append("<a class='logslink' href='").append(logsUrl)
+             .append("'>logs</a>\",\"");
+
+        attemptsTableData.append(attemptStartTime).append("\",\"");
 
         if(type == TaskType.REDUCE) {
           attemptsTableData.append(shuffleFinishTime).append("\",\"")
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
index 18687ff..337f182 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.webapp.Controller.RequestContext;
 import org.apache.hadoop.yarn.webapp.View.ViewContext;
 import org.apache.hadoop.yarn.webapp.Controller;
@@ -193,7 +194,9 @@ public class TestBlocks {
     when(job.getUserName()).thenReturn("User");
     app.setJob(job);
 
-    AttemptsBlockForTest block = new AttemptsBlockForTest(app);
+    Configuration conf = new Configuration();
+    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
+    AttemptsBlockForTest block = new AttemptsBlockForTest(app, conf);
     block.addParameter(AMParams.TASK_TYPE, "r");
 
     PrintWriter pWriter = new PrintWriter(data);
@@ -212,6 +215,27 @@ public class TestBlocks {
     assertTrue(data.toString().contains("100010"));
     assertTrue(data.toString().contains("100011"));
     assertTrue(data.toString().contains("100012"));
+    data.reset();
+    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false);
+    block = new AttemptsBlockForTest(app, conf);
+    block.addParameter(AMParams.TASK_TYPE, "r");
+
+    pWriter = new PrintWriter(data);
+    html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
+
+    block.render(html);
+    pWriter.flush();
+    // should be printed information about attempts
+    assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
+    assertTrue(data.toString().contains("SUCCEEDED"));
+    assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
+    assertTrue(data.toString().contains("Processed 128\\/128 records &lt;p&gt; \\n"));
+    assertTrue(data.toString().contains(
+        "Node address:node:containerlogs:container_0_0005_01_000001:User:"));
+    assertTrue(data.toString().contains("100002"));
+    assertTrue(data.toString().contains("100010"));
+    assertTrue(data.toString().contains("100011"));
+    assertTrue(data.toString().contains("100012"));
   }
 
   /**
@@ -438,8 +462,8 @@ public class TestBlocks {
       return value == null ? defaultValue : value;
     }
 
-    public AttemptsBlockForTest(App ctx) {
-      super(ctx);
+    public AttemptsBlockForTest(App ctx, Configuration conf) {
+      super(ctx, conf);
     }
 
     @Override

