You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by cm...@apache.org on 2013/12/30 20:36:27 UTC
svn commit: r1554297 - in
/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs:
CHANGES.txt src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
Author: cmccabe
Date: Mon Dec 30 19:36:26 2013
New Revision: 1554297
URL: http://svn.apache.org/r1554297
Log:
HDFS-5582. hdfs getconf -excludeFile or -includeFile always failed (sathish via cmccabe)
Modified:
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1554297&r1=1554296&r2=1554297&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Mon Dec 30 19:36:26 2013
@@ -435,6 +435,9 @@ Release 2.3.0 - UNRELEASED
HDFS-5661. Browsing FileSystem via web ui, should use datanode's fqdn instead of ip
address. (Benoy Antony via jing9)
+ HDFS-5582. hdfs getconf -excludeFile or -includeFile always failed (sathish
+ via cmccabe)
+
Release 2.2.0 - 2013-10-13
INCOMPATIBLE CHANGES
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java?rev=1554297&r1=1554296&r2=1554297&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java Mon Dec 30 19:36:26 2013
@@ -29,6 +29,7 @@ import java.util.Map;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
@@ -85,9 +86,9 @@ public class GetConf extends Configured
map.put(BACKUP.getName().toLowerCase(),
new BackupNodesCommandHandler());
map.put(INCLUDE_FILE.getName().toLowerCase(),
- new CommandHandler("DFSConfigKeys.DFS_HOSTS"));
+ new CommandHandler(DFSConfigKeys.DFS_HOSTS));
map.put(EXCLUDE_FILE.getName().toLowerCase(),
- new CommandHandler("DFSConfigKeys.DFS_HOSTS_EXCLUDE"));
+ new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
map.put(NNRPCADDRESSES.getName().toLowerCase(),
new NNRpcAddressesCommandHandler());
map.put(CONFKEY.getName().toLowerCase(),
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java?rev=1554297&r1=1554296&r2=1554297&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java Mon Dec 30 19:36:26 2013
@@ -33,10 +33,15 @@ import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -55,7 +60,7 @@ public class TestGetConf {
enum TestType {
NAMENODE, BACKUP, SECONDARY, NNRPCADDRESSES
}
-
+ FileSystem localFileSys;
/** Setup federation nameServiceIds in the configuration */
private void setupNameServices(HdfsConfiguration conf, int nameServiceIdCount) {
StringBuilder nsList = new StringBuilder();
@@ -379,4 +384,70 @@ public class TestGetConf {
}
}
}
+ @Test
+ public void TestGetConfExcludeCommand() throws Exception{
+ HdfsConfiguration conf = new HdfsConfiguration();
+ // Set up the hosts/exclude files.
+ localFileSys = FileSystem.getLocal(conf);
+ Path workingDir = localFileSys.getWorkingDirectory();
+ Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
+ Path hostsFile = new Path(dir, "hosts");
+ Path excludeFile = new Path(dir, "exclude");
+
+ // Setup conf
+ conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
+ conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
+ writeConfigFile(hostsFile, null);
+ writeConfigFile(excludeFile, null);
+ String[] args = {"-excludeFile"};
+ String ret = runTool(conf, args, true);
+ assertEquals(excludeFile.toUri().getPath(),ret.trim());
+ cleanupFile(localFileSys, excludeFile.getParent());
+ }
+
+ @Test
+ public void TestGetConfIncludeCommand() throws Exception{
+ HdfsConfiguration conf = new HdfsConfiguration();
+ // Set up the hosts/exclude files.
+ localFileSys = FileSystem.getLocal(conf);
+ Path workingDir = localFileSys.getWorkingDirectory();
+ Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
+ Path hostsFile = new Path(dir, "hosts");
+ Path excludeFile = new Path(dir, "exclude");
+
+ // Setup conf
+ conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
+ conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
+ writeConfigFile(hostsFile, null);
+ writeConfigFile(excludeFile, null);
+ String[] args = {"-includeFile"};
+ String ret = runTool(conf, args, true);
+ assertEquals(hostsFile.toUri().getPath(),ret.trim());
+ cleanupFile(localFileSys, excludeFile.getParent());
+ }
+
+ private void writeConfigFile(Path name, ArrayList<String> nodes)
+ throws IOException {
+ // delete if it already exists
+ if (localFileSys.exists(name)) {
+ localFileSys.delete(name, true);
+ }
+
+ FSDataOutputStream stm = localFileSys.create(name);
+
+ if (nodes != null) {
+ for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
+ String node = it.next();
+ stm.writeBytes(node);
+ stm.writeBytes("\n");
+ }
+ }
+ stm.close();
+ }
+
+ private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
+ assertTrue(fileSys.exists(name));
+ fileSys.delete(name, true);
+ assertTrue(!fileSys.exists(name));
+ }
}