You are viewing a plain-text version of this content; the hyperlink to the canonical version was lost in extraction.
Posted to hdfs-commits@hadoop.apache.org by to...@apache.org on 2012/04/07 02:15:30 UTC
svn commit: r1310648 - in
/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: CHANGES.txt
src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
Author: todd
Date: Sat Apr 7 00:15:30 2012
New Revision: 1310648
URL: http://svn.apache.org/viewvc?rev=1310648&view=rev
Log:
HDFS-3226. Allow GetConf tool to print arbitrary keys. Contributed by Todd Lipcon.
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1310648&r1=1310647&r2=1310648&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Sat Apr 7 00:15:30 2012
@@ -333,6 +333,8 @@ Release 2.0.0 - UNRELEASED
HDFS-3050. rework OEV to share more code with the NameNode.
(Colin Patrick McCabe via eli)
+ HDFS-3226. Allow GetConf tool to print arbitrary keys (todd)
+
OPTIMIZATIONS
HDFS-3024. Improve performance of stringification in addStoredBlock (todd)
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java?rev=1310648&r1=1310647&r2=1310648&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java Sat Apr 7 00:15:30 2012
@@ -21,10 +21,12 @@ import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdfs.DFSUtil;
@@ -70,7 +72,8 @@ public class GetConf extends Configured
EXCLUDE_FILE("-excludeFile",
"gets the exclude file path that defines the datanodes " +
"that need to decommissioned."),
- NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses");
+ NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses"),
+ CONFKEY("-confKey [key]", "gets a specific key from the configuration");
private static Map<String, CommandHandler> map;
static {
@@ -87,6 +90,8 @@ public class GetConf extends Configured
new CommandHandler("DFSConfigKeys.DFS_HOSTS_EXCLUDE"));
map.put(NNRPCADDRESSES.getName().toLowerCase(),
new NNRpcAddressesCommandHandler());
+ map.put(CONFKEY.getName().toLowerCase(),
+ new PrintConfKeyCommandHandler());
}
private final String cmd;
@@ -98,6 +103,10 @@ public class GetConf extends Configured
}
public String getName() {
+ return cmd.split(" ")[0];
+ }
+
+ public String getUsage() {
return cmd;
}
@@ -105,8 +114,8 @@ public class GetConf extends Configured
return description;
}
- public static CommandHandler getHandler(String name) {
- return map.get(name.toLowerCase());
+ public static CommandHandler getHandler(String cmd) {
+ return map.get(cmd.toLowerCase());
}
}
@@ -118,7 +127,7 @@ public class GetConf extends Configured
StringBuilder usage = new StringBuilder(DESCRIPTION);
usage.append("\nhadoop getconf \n");
for (Command cmd : Command.values()) {
- usage.append("\t[" + cmd.getName() + "]\t\t\t" + cmd.getDescription()
+ usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription()
+ "\n");
}
USAGE = usage.toString();
@@ -128,7 +137,7 @@ public class GetConf extends Configured
* Handler to return value for key corresponding to the {@link Command}
*/
static class CommandHandler {
- final String key; // Configuration key to lookup
+ String key; // Configuration key to lookup
CommandHandler() {
this(null);
@@ -138,18 +147,30 @@ public class GetConf extends Configured
this.key = key;
}
- final int doWork(GetConf tool) {
+ final int doWork(GetConf tool, String[] args) {
try {
- return doWorkInternal(tool);
+ checkArgs(args);
+
+ return doWorkInternal(tool, args);
} catch (Exception e) {
tool.printError(e.getMessage());
}
return -1;
}
+
+ protected void checkArgs(String args[]) {
+ if (args.length > 0) {
+ throw new HadoopIllegalArgumentException(
+ "Did not expect argument: " + args[0]);
+ }
+ }
+
- /** Method to be overridden by sub classes for specific behavior */
- int doWorkInternal(GetConf tool) throws Exception {
- String value = tool.getConf().get(key);
+ /** Method to be overridden by sub classes for specific behavior
+ * @param args */
+ int doWorkInternal(GetConf tool, String[] args) throws Exception {
+
+ String value = tool.getConf().getTrimmed(key);
if (value != null) {
tool.printOut(value);
return 0;
@@ -164,7 +185,7 @@ public class GetConf extends Configured
*/
static class NameNodesCommandHandler extends CommandHandler {
@Override
- int doWorkInternal(GetConf tool) throws IOException {
+ int doWorkInternal(GetConf tool, String []args) throws IOException {
tool.printMap(DFSUtil.getNNServiceRpcAddresses(tool.getConf()));
return 0;
}
@@ -175,7 +196,7 @@ public class GetConf extends Configured
*/
static class BackupNodesCommandHandler extends CommandHandler {
@Override
- public int doWorkInternal(GetConf tool) throws IOException {
+ public int doWorkInternal(GetConf tool, String []args) throws IOException {
tool.printMap(DFSUtil.getBackupNodeAddresses(tool.getConf()));
return 0;
}
@@ -186,7 +207,7 @@ public class GetConf extends Configured
*/
static class SecondaryNameNodesCommandHandler extends CommandHandler {
@Override
- public int doWorkInternal(GetConf tool) throws IOException {
+ public int doWorkInternal(GetConf tool, String []args) throws IOException {
tool.printMap(DFSUtil.getSecondaryNameNodeAddresses(tool.getConf()));
return 0;
}
@@ -199,7 +220,7 @@ public class GetConf extends Configured
*/
static class NNRpcAddressesCommandHandler extends CommandHandler {
@Override
- public int doWorkInternal(GetConf tool) throws IOException {
+ public int doWorkInternal(GetConf tool, String []args) throws IOException {
Configuration config = tool.getConf();
List<ConfiguredNNAddress> cnnlist = DFSUtil.flattenAddressMap(
DFSUtil.getNNServiceRpcAddresses(config));
@@ -215,6 +236,23 @@ public class GetConf extends Configured
}
}
+ static class PrintConfKeyCommandHandler extends CommandHandler {
+ @Override
+ protected void checkArgs(String[] args) {
+ if (args.length != 1) {
+ throw new HadoopIllegalArgumentException(
+ "usage: " + Command.CONFKEY.getUsage());
+ }
+ }
+
+ @Override
+ int doWorkInternal(GetConf tool, String[] args) throws Exception {
+ this.key = args[0];
+ System.err.println("key: " + key);
+ return super.doWorkInternal(tool, args);
+ }
+ }
+
private final PrintStream out; // Stream for printing command output
private final PrintStream err; // Stream for printing error
@@ -260,10 +298,11 @@ public class GetConf extends Configured
* @return return status of the command
*/
private int doWork(String[] args) {
- if (args.length == 1) {
+ if (args.length >= 1) {
CommandHandler handler = Command.getHandler(args[0]);
if (handler != null) {
- return handler.doWork(this);
+ return handler.doWork(this,
+ Arrays.copyOfRange(args, 1, args.length));
}
}
printUsage();
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java?rev=1310648&r1=1310647&r2=1310648&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java Sat Apr 7 00:15:30 2012
@@ -42,6 +42,8 @@ import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;
+import com.google.common.base.Joiner;
+
/**
* Test for {@link GetConf}
*/
@@ -117,7 +119,12 @@ public class TestGetConf {
PrintStream out = new PrintStream(o, true);
try {
int ret = ToolRunner.run(new GetConf(conf, out, out), args);
- assertEquals(success, ret == 0);
+ out.flush();
+ System.err.println("Output: " + o.toString());
+ assertEquals("Expected " + (success?"success":"failure") +
+ " for args: " + Joiner.on(" ").join(args) + "\n" +
+ "Output: " + o.toString(),
+ success, ret == 0);
return o.toString();
} finally {
o.close();
@@ -222,7 +229,9 @@ public class TestGetConf {
getAddressListFromTool(TestType.SECONDARY, conf, false);
getAddressListFromTool(TestType.NNRPCADDRESSES, conf, false);
for (Command cmd : Command.values()) {
- CommandHandler handler = Command.getHandler(cmd.getName());
+ String arg = cmd.getName();
+ CommandHandler handler = Command.getHandler(arg);
+ assertNotNull("missing handler: " + cmd, handler);
if (handler.key != null) {
// First test with configuration missing the required key
String[] args = {handler.key};
@@ -319,18 +328,36 @@ public class TestGetConf {
verifyAddresses(conf, TestType.SECONDARY, false, secondaryAddresses);
verifyAddresses(conf, TestType.NNRPCADDRESSES, true, nnAddresses);
}
+
+ @Test
+ public void testGetSpecificKey() throws Exception {
+ HdfsConfiguration conf = new HdfsConfiguration();
+ conf.set("mykey", " myval ");
+ String[] args = {"-confKey", "mykey"};
+ assertTrue(runTool(conf, args, true).equals("myval\n"));
+ }
+
+ @Test
+ public void testExtraArgsThrowsError() throws Exception {
+ HdfsConfiguration conf = new HdfsConfiguration();
+ conf.set("mykey", "myval");
+ String[] args = {"-namenodes", "unexpected-arg"};
+ assertTrue(runTool(conf, args, false).contains(
+ "Did not expect argument: unexpected-arg"));
+ }
/**
* Tests commands other than {@link Command#NAMENODE}, {@link Command#BACKUP},
* {@link Command#SECONDARY} and {@link Command#NNRPCADDRESSES}
*/
+ @Test
public void testTool() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration(false);
for (Command cmd : Command.values()) {
CommandHandler handler = Command.getHandler(cmd.getName());
- if (handler.key != null) {
+ if (handler.key != null && !"-confKey".equals(cmd.getName())) {
// Add the key to the conf and ensure tool returns the right value
- String[] args = {handler.key};
+ String[] args = {cmd.getName()};
conf.set(handler.key, "value");
assertTrue(runTool(conf, args, true).contains("value"));
}