Posted to common-commits@hadoop.apache.org by br...@apache.org on 2016/10/13 16:16:00 UTC

[2/2] hadoop git commit: HDFS-10986. DFSAdmin should log detailed error message if any. Contributed by Mingliang Liu

HDFS-10986. DFSAdmin should log detailed error message if any. Contributed by Mingliang Liu

(cherry picked from commit 29aa11b1a252e007ed62fad362096ca43aa408af)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b60e545a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b60e545a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b60e545a

Branch: refs/heads/branch-2
Commit: b60e545a08140b2e1c268a2310e80d6ed659eb39
Parents: a6197c1
Author: Brahma Reddy Battula <br...@apache.org>
Authored: Thu Oct 13 21:39:50 2016 +0530
Committer: Brahma Reddy Battula <br...@apache.org>
Committed: Thu Oct 13 21:41:25 2016 +0530

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/tools/DFSAdmin.java  |   8 +-
 .../apache/hadoop/hdfs/tools/TestDFSAdmin.java  | 106 +++++++++----------
 2 files changed, 51 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b60e545a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
index bd3ed15..3d956a0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
@@ -911,8 +911,7 @@ public class DFSAdmin extends FsShell {
       System.out.println("Balancer bandwidth is " + bandwidth
           + " bytes per second.");
     } catch (IOException ioe) {
-      System.err.println("Datanode unreachable.");
-      return -1;
+      throw new IOException("Datanode unreachable. " + ioe, ioe);
     }
     return 0;
   }
@@ -2178,7 +2177,7 @@ public class DFSAdmin extends FsShell {
       dnProxy.evictWriters();
       System.out.println("Requested writer eviction to datanode " + dn);
     } catch (IOException ioe) {
-      return -1;
+      throw new IOException("Datanode unreachable. " + ioe, ioe);
     }
     return 0;
   }
@@ -2189,8 +2188,7 @@ public class DFSAdmin extends FsShell {
       DatanodeLocalInfo dnInfo = dnProxy.getDatanodeInfo();
       System.out.println(dnInfo.getDatanodeLocalReport());
     } catch (IOException ioe) {
-      System.err.println("Datanode unreachable.");
-      return -1;
+      throw new IOException("Datanode unreachable. " + ioe, ioe);
     }
     return 0;
   }
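
[Editor's note] The hunks above replace the bare "Datanode unreachable." print and the -1 return with a chained IOException that keeps the underlying cause in the message. The command still exits with -1 because the FsShell/ToolRunner driver that invokes these sub-commands catches the exception, prints its message to stderr, and returns a failure code. Below is a minimal, self-contained sketch of that pattern, assuming that driver behaviour; the class name, method name, and simulated failure are illustrative, not Hadoop source.

    import java.io.IOException;

    public class ErrorReportingSketch {

      // Stand-in for a DFSAdmin sub-command such as getDatanodeInfo. Before the
      // patch it printed a generic message and returned -1; after the patch it
      // throws, keeping the original cause chained and in the message text.
      static int getDatanodeInfo(boolean reachable) throws IOException {
        try {
          if (!reachable) {
            // Simulated RPC failure standing in for an unreachable datanode.
            throw new IOException("EOFException: connection reset");
          }
          System.out.println("Uptime: ...");
          return 0;
        } catch (IOException ioe) {
          throw new IOException("Datanode unreachable. " + ioe, ioe);
        }
      }

      // Stand-in for the FsShell-style driver: it turns the exception into a
      // detailed stderr line plus a -1 exit code, so callers still see failure.
      public static void main(String[] args) {
        int exit;
        try {
          exit = getDatanodeInfo(false);
        } catch (IOException e) {
          System.err.println("getDatanodeInfo: " + e.getLocalizedMessage());
          exit = -1;
        }
        System.exit(exit);
      }
    }
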

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b60e545a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
index b49f73d..dca42ea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.tools;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
@@ -79,6 +80,7 @@ public class TestDFSAdmin {
   @Before
   public void setUp() throws Exception {
     conf = new Configuration();
+    conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
     restartCluster();
 
     admin = new DFSAdmin();
@@ -116,7 +118,7 @@ public class TestDFSAdmin {
     if (cluster != null) {
       cluster.shutdown();
     }
-    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
     cluster.waitActive();
     datanode = cluster.getDataNodes().get(0);
     namenode = cluster.getNameNode();
@@ -171,70 +173,58 @@ public class TestDFSAdmin {
   @Test(timeout = 30000)
   public void testGetDatanodeInfo() throws Exception {
     redirectStream();
-    final Configuration dfsConf = new HdfsConfiguration();
-    final int numDn = 2;
-
-    /* init cluster */
-    try (MiniDFSCluster miniCluster = new MiniDFSCluster.Builder(dfsConf)
-        .numDataNodes(numDn).build()) {
-
-      miniCluster.waitActive();
-      assertEquals(numDn, miniCluster.getDataNodes().size());
-      final DFSAdmin dfsAdmin = new DFSAdmin(dfsConf);
+    final DFSAdmin dfsAdmin = new DFSAdmin(conf);
 
-      /* init reused vars */
-      List<String> outs = null;
-      int ret;
-
-      /**
-       * test erroneous run
-       */
+    for (int i = 0; i < cluster.getDataNodes().size(); i++) {
       resetStream();
-      outs = Lists.newArrayList();
-
-      /* invoke getDatanodeInfo */
-      ret = ToolRunner.run(
-          dfsAdmin,
-          new String[] {"-getDatanodeInfo", "128.0.0.1:1234"});
+      final DataNode dn = cluster.getDataNodes().get(i);
+      final String addr = String.format(
+          "%s:%d",
+          dn.getXferAddress().getHostString(),
+          dn.getIpcPort());
+      final int ret = ToolRunner.run(dfsAdmin,
+          new String[]{"-getDatanodeInfo", addr});
+      assertEquals(0, ret);
 
       /* collect outputs */
+      final List<String> outs = Lists.newArrayList();
       scanIntoList(out, outs);
-
       /* verify results */
+      assertEquals(
+          "One line per DataNode like: Uptime: XXX, Software version: x.y.z,"
+              + " Config version: core-x.y.z,hdfs-x",
+          1, outs.size());
+      assertThat(outs.get(0),
+          is(allOf(containsString("Uptime:"),
+              containsString("Software version"),
+              containsString("Config version"))));
+    }
+  }
+
+  /**
+   * Test that if a datanode is not reachable, some DFSAdmin commands fail
+   * gracefully with a non-zero exit code and an exception error message.
+   */
+  @Test(timeout = 60000)
+  public void testDFSAdminUnreachableDatanode() throws Exception {
+    redirectStream();
+    final DFSAdmin dfsAdmin = new DFSAdmin(conf);
+    for (String command : new String[]{"-getDatanodeInfo",
+        "-evictWriters", "-getBalancerBandwidth"}) {
+      // Connecting to Xfer port instead of IPC port will get
+      // Datanode unreachable. java.io.EOFException
+      final String dnDataAddr = datanode.getXferAddress().getHostString() + ":"
+          + datanode.getXferPort();
+      resetStream();
+      final List<String> outs = Lists.newArrayList();
+      final int ret = ToolRunner.run(dfsAdmin,
+          new String[]{command, dnDataAddr});
       assertEquals(-1, ret);
-      assertTrue("Unexpected getDatanodeInfo stdout", outs.isEmpty());
-
-      /**
-       * test normal run
-       */
-      for (int i = 0; i < numDn; i++) {
-        resetStream();
-        final DataNode dn = miniCluster.getDataNodes().get(i);
-
-        /* invoke getDatanodeInfo */
-        final String addr = String.format(
-            "%s:%d",
-            dn.getXferAddress().getHostString(),
-            dn.getIpcPort());
-        ret = ToolRunner.run(
-            dfsAdmin,
-            new String[] {"-getDatanodeInfo", addr});
-
-        /* collect outputs */
-        outs = Lists.newArrayList();
-        scanIntoList(out, outs);
-
-        /* verify results */
-        assertEquals(0, ret);
-        assertEquals(
-            "One line per DataNode like: Uptime: XXX, Software version: x.y.z,"
-                + " Config version: core-x.y.z,hdfs-x",
-            1, outs.size());
-        assertThat(outs.get(0),
-            is(allOf(containsString("Uptime:"),
-                containsString("Software version"),
-                containsString("Config version"))));
-      }
+
+      scanIntoList(out, outs);
+      assertTrue("Unexpected " + command + " stdout: " + out, outs.isEmpty());
+      assertTrue("Unexpected " + command + " stderr: " + err,
+          err.toString().contains("Exception"));
     }
   }
 


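[Editor's note] The new test drives the three affected sub-commands through ToolRunner against the datanode's data-transfer (non-IPC) port and expects exit code -1 plus an exception message on stderr. The same check can be sketched outside MiniDFSCluster as below; this is a hedged sketch assuming the patched DFSAdmin behaviour, and the 127.0.0.1:50010 target is only a placeholder for a data-transfer address.

    import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.tools.DFSAdmin;
    import org.apache.hadoop.util.ToolRunner;

    public class DfsAdminUnreachableDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Mirror the test setup: cap IPC connect retries so an unreachable
        // address fails quickly instead of retrying for a long time.
        conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
        DFSAdmin dfsAdmin = new DFSAdmin(conf);

        // Same three sub-commands the new test exercises.
        for (String command : new String[] {
            "-getDatanodeInfo", "-evictWriters", "-getBalancerBandwidth"}) {
          // Placeholder address: a data-transfer port, so the IPC call should
          // fail and surface the chained "Datanode unreachable." message.
          int ret = ToolRunner.run(dfsAdmin,
              new String[] {command, "127.0.0.1:50010"});
          System.out.println(command + " returned " + ret); // expected: -1
        }
      }
    }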