You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by um...@apache.org on 2016/10/17 23:14:35 UTC
[16/50] hadoop git commit: HDFS-10986. DFSAdmin should log detailed
error message if any. Contributed by Mingliang Liu
HDFS-10986. DFSAdmin should log detailed error message if any. Contributed by Mingliang Liu
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/12912540
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/12912540
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/12912540
Branch: refs/heads/HDFS-10285
Commit: 129125404244f35ee63b8f0491a095371685e9ba
Parents: 9454dc5
Author: Brahma Reddy Battula <br...@apache.org>
Authored: Thu Oct 13 21:39:50 2016 +0530
Committer: Brahma Reddy Battula <br...@apache.org>
Committed: Thu Oct 13 22:05:00 2016 +0530
----------------------------------------------------------------------
.../org/apache/hadoop/hdfs/tools/DFSAdmin.java | 8 +-
.../apache/hadoop/hdfs/tools/TestDFSAdmin.java | 106 +++++++++----------
2 files changed, 51 insertions(+), 63 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/12912540/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
index 32401dc..a60f24b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
@@ -936,8 +936,7 @@ public class DFSAdmin extends FsShell {
System.out.println("Balancer bandwidth is " + bandwidth
+ " bytes per second.");
} catch (IOException ioe) {
- System.err.println("Datanode unreachable.");
- return -1;
+ throw new IOException("Datanode unreachable. " + ioe, ioe);
}
return 0;
}
@@ -2207,7 +2206,7 @@ public class DFSAdmin extends FsShell {
dnProxy.evictWriters();
System.out.println("Requested writer eviction to datanode " + dn);
} catch (IOException ioe) {
- return -1;
+ throw new IOException("Datanode unreachable. " + ioe, ioe);
}
return 0;
}
@@ -2218,8 +2217,7 @@ public class DFSAdmin extends FsShell {
DatanodeLocalInfo dnInfo = dnProxy.getDatanodeInfo();
System.out.println(dnInfo.getDatanodeLocalReport());
} catch (IOException ioe) {
- System.err.println("Datanode unreachable.");
- return -1;
+ throw new IOException("Datanode unreachable. " + ioe, ioe);
}
return 0;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/12912540/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
index b49f73d..dca42ea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdmin.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hdfs.tools;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
@@ -79,6 +80,7 @@ public class TestDFSAdmin {
@Before
public void setUp() throws Exception {
conf = new Configuration();
+ conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
restartCluster();
admin = new DFSAdmin();
@@ -116,7 +118,7 @@ public class TestDFSAdmin {
if (cluster != null) {
cluster.shutdown();
}
- cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+ cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
cluster.waitActive();
datanode = cluster.getDataNodes().get(0);
namenode = cluster.getNameNode();
@@ -171,70 +173,58 @@ public class TestDFSAdmin {
@Test(timeout = 30000)
public void testGetDatanodeInfo() throws Exception {
redirectStream();
- final Configuration dfsConf = new HdfsConfiguration();
- final int numDn = 2;
-
- /* init cluster */
- try (MiniDFSCluster miniCluster = new MiniDFSCluster.Builder(dfsConf)
- .numDataNodes(numDn).build()) {
-
- miniCluster.waitActive();
- assertEquals(numDn, miniCluster.getDataNodes().size());
- final DFSAdmin dfsAdmin = new DFSAdmin(dfsConf);
+ final DFSAdmin dfsAdmin = new DFSAdmin(conf);
- /* init reused vars */
- List<String> outs = null;
- int ret;
-
- /**
- * test erroneous run
- */
+ for (int i = 0; i < cluster.getDataNodes().size(); i++) {
resetStream();
- outs = Lists.newArrayList();
-
- /* invoke getDatanodeInfo */
- ret = ToolRunner.run(
- dfsAdmin,
- new String[] {"-getDatanodeInfo", "128.0.0.1:1234"});
+ final DataNode dn = cluster.getDataNodes().get(i);
+ final String addr = String.format(
+ "%s:%d",
+ dn.getXferAddress().getHostString(),
+ dn.getIpcPort());
+ final int ret = ToolRunner.run(dfsAdmin,
+ new String[]{"-getDatanodeInfo", addr});
+ assertEquals(0, ret);
/* collect outputs */
+ final List<String> outs = Lists.newArrayList();
scanIntoList(out, outs);
-
/* verify results */
+ assertEquals(
+ "One line per DataNode like: Uptime: XXX, Software version: x.y.z,"
+ + " Config version: core-x.y.z,hdfs-x",
+ 1, outs.size());
+ assertThat(outs.get(0),
+ is(allOf(containsString("Uptime:"),
+ containsString("Software version"),
+ containsString("Config version"))));
+ }
+ }
+
+ /**
+ * Test that if datanode is not reachable, some DFSAdmin commands will fail
+ * elegantly with non-zero ret error code along with exception error message.
+ */
+ @Test(timeout = 60000)
+ public void testDFSAdminUnreachableDatanode() throws Exception {
+ redirectStream();
+ final DFSAdmin dfsAdmin = new DFSAdmin(conf);
+ for (String command : new String[]{"-getDatanodeInfo",
+ "-evictWriters", "-getBalancerBandwidth"}) {
+ // Connecting to Xfer port instead of IPC port will get
+ // Datanode unreachable. java.io.EOFException
+ final String dnDataAddr = datanode.getXferAddress().getHostString() + ":"
+ + datanode.getXferPort();
+ resetStream();
+ final List<String> outs = Lists.newArrayList();
+ final int ret = ToolRunner.run(dfsAdmin,
+ new String[]{command, dnDataAddr});
assertEquals(-1, ret);
- assertTrue("Unexpected getDatanodeInfo stdout", outs.isEmpty());
-
- /**
- * test normal run
- */
- for (int i = 0; i < numDn; i++) {
- resetStream();
- final DataNode dn = miniCluster.getDataNodes().get(i);
-
- /* invoke getDatanodeInfo */
- final String addr = String.format(
- "%s:%d",
- dn.getXferAddress().getHostString(),
- dn.getIpcPort());
- ret = ToolRunner.run(
- dfsAdmin,
- new String[] {"-getDatanodeInfo", addr});
-
- /* collect outputs */
- outs = Lists.newArrayList();
- scanIntoList(out, outs);
-
- /* verify results */
- assertEquals(0, ret);
- assertEquals(
- "One line per DataNode like: Uptime: XXX, Software version: x.y.z,"
- + " Config version: core-x.y.z,hdfs-x",
- 1, outs.size());
- assertThat(outs.get(0),
- is(allOf(containsString("Uptime:"),
- containsString("Software version"),
- containsString("Config version"))));
- }
+
+ scanIntoList(out, outs);
+ assertTrue("Unexpected " + command + " stdout: " + out, outs.isEmpty());
+ assertTrue("Unexpected " + command + " stderr: " + err,
+ err.toString().contains("Exception"));
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org