You are viewing a plain text version of this content. The canonical link was present in the original HTML version but was not preserved in this plain-text extraction.
Posted to common-commits@hadoop.apache.org by as...@apache.org on 2015/12/06 08:13:34 UTC

[32/38] hadoop git commit: HDFS-9474. TestPipelinesFailover should not fail when printing debug message. (John Zhuge via Yongjun Zhang)

HDFS-9474. TestPipelinesFailover should not fail when printing debug message. (John Zhuge via Yongjun Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/59dbe8b3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/59dbe8b3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/59dbe8b3

Branch: refs/heads/yarn-2877
Commit: 59dbe8b3e96d13c2322cabd87c7f893c5a3812ba
Parents: e02bbeb
Author: Yongjun Zhang <yz...@cloudera.com>
Authored: Fri Dec 4 13:45:01 2015 -0800
Committer: Yongjun Zhang <yz...@cloudera.com>
Committed: Fri Dec 4 13:45:01 2015 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 ++
 .../namenode/ha/TestPipelinesFailover.java      | 38 ++++++++++----------
 2 files changed, 21 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/59dbe8b3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 99aa719c..34c3ff2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1717,6 +1717,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9490. MiniDFSCluster should change block generation stamp via
     FsDatasetTestUtils. (Tony Wu via lei)
 
+    HDFS-9474. TestPipelinesFailover should not fail when printing debug
+    message. (John Zhuge via Yongjun Zhang)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/59dbe8b3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
index f1858a7..9ece121 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
@@ -56,7 +56,7 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
 import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
-import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.log4j.Level;
 import org.junit.Test;
@@ -430,29 +430,27 @@ public class TestPipelinesFailover {
     // The following section of code is to help debug HDFS-6694 about
     // this test that fails from time to time due to "too many open files".
     //
+    LOG.info("HDFS-6694 Debug Data BEGIN");
 
-    // Only collect debug data on these OSes.
-    if (Shell.LINUX || Shell.SOLARIS || Shell.MAC) {
-      System.out.println("HDFS-6694 Debug Data BEGIN===");
-      
-      String[] scmd = new String[] {"/bin/sh", "-c", "ulimit -a"};
-      ShellCommandExecutor sce = new ShellCommandExecutor(scmd);
-      sce.execute();
-      System.out.println("'ulimit -a' output:\n" + sce.getOutput());
-
-      scmd = new String[] {"hostname"};
-      sce = new ShellCommandExecutor(scmd);
-      sce.execute();
-      System.out.println("'hostname' output:\n" + sce.getOutput());
-
-      scmd = new String[] {"ifconfig", "-a"};
-      sce = new ShellCommandExecutor(scmd);
-      sce.execute();
-      System.out.println("'ifconfig' output:\n" + sce.getOutput());
+    String[][] scmds = new String[][] {
+      {"/bin/sh", "-c", "ulimit -a"},
+      {"hostname"},
+      {"ifconfig", "-a"}
+    };
 
-      System.out.println("===HDFS-6694 Debug Data END");
+    for (String[] scmd: scmds) {
+      String scmd_str = StringUtils.join(" ", scmd);
+      try {
+        ShellCommandExecutor sce = new ShellCommandExecutor(scmd);
+        sce.execute();
+        LOG.info("'" + scmd_str + "' output:\n" + sce.getOutput());
+      } catch (IOException e) {
+        LOG.warn("Error when running '" + scmd_str + "'", e);
+      }
     }
 
+    LOG.info("HDFS-6694 Debug Data END");
+
     HAStressTestHarness harness = new HAStressTestHarness();
     // Disable permissions so that another user can recover the lease.
     harness.conf.setBoolean(