You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by wa...@apache.org on 2013/10/05 00:28:24 UTC
svn commit: r1529334 - in
/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs: ./
src/main/java/org/apache/hadoop/hdfs/
src/main/java/org/apache/hadoop/hdfs/server/namenode/
src/main/java/org/apache/hadoop/hdfs/tools/ src/test/java/org/ap...
Author: wang
Date: Fri Oct 4 22:28:23 2013
New Revision: 1529334
URL: http://svn.apache.org/r1529334
Log:
HDFS-5190. Move cache pool related CLI commands to CacheAdmin. (Contributed by Andrew Wang)
Added:
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java (with props)
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java (with props)
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java (with props)
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml (with props)
Modified:
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt Fri Oct 4 22:28:23 2013
@@ -51,6 +51,9 @@ HDFS-4949 (Unreleased)
HDFS-5119. Persist CacheManager state in the edit log.
(Contributed by Andrew Wang)
+ HDFS-5190. Move cache pool related CLI commands to CacheAdmin.
+ (Contributed by Andrew Wang)
+
OPTIMIZATIONS
BUG FIXES
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java Fri Oct 4 22:28:23 2013
@@ -2294,11 +2294,11 @@ public class DFSClient implements java.i
}
}
- public void removePathBasedCacheDescriptor(PathBasedCacheDescriptor descriptor)
+ public void removePathBasedCacheDescriptor(long id)
throws IOException {
checkOpen();
try {
- namenode.removePathBasedCacheDescriptor(descriptor.getEntryId());
+ namenode.removePathBasedCacheDescriptor(id);
} catch (RemoteException re) {
throw re.unwrapRemoteException();
}
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java Fri Oct 4 22:28:23 2013
@@ -1602,7 +1602,7 @@ public class DistributedFileSystem exten
*/
public void removePathBasedCacheDescriptor(PathBasedCacheDescriptor descriptor)
throws IOException {
- dfs.removePathBasedCacheDescriptor(descriptor);
+ dfs.removePathBasedCacheDescriptor(descriptor.getEntryId());
}
/**
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java Fri Oct 4 22:28:23 2013
@@ -469,7 +469,7 @@ public final class CacheManager {
while (iter.hasNext()) {
Entry<Long, PathBasedCacheEntry> entry = iter.next();
if (entry.getValue().getPool() == pool) {
- entriesById.remove(entry.getValue().getEntryId());
+ entriesByPath.remove(entry.getValue().getPath());
iter.remove();
}
}
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java Fri Oct 4 22:28:23 2013
@@ -21,24 +21,76 @@ import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
+import org.apache.commons.lang.WordUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException;
+import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException;
+import org.apache.hadoop.hdfs.server.namenode.CachePool;
import org.apache.hadoop.hdfs.tools.TableListing.Justification;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Tool;
+
+import com.google.common.base.Joiner;
/**
* This class implements command-line operations on the HDFS Cache.
*/
@InterfaceAudience.Private
-public class CacheAdmin {
- private static Configuration conf = new Configuration();
+public class CacheAdmin extends Configured implements Tool {
+
+ /**
+ * Maximum length for printed lines
+ */
+ private static final int MAX_LINE_WIDTH = 80;
+
+ public CacheAdmin() {
+ this(null);
+ }
+
+ public CacheAdmin(Configuration conf) {
+ super(conf);
+ }
+
+ @Override
+ public int run(String[] args) throws IOException {
+ if (args.length == 0) {
+ printUsage(false);
+ return 1;
+ }
+ Command command = determineCommand(args[0]);
+ if (command == null) {
+ System.err.println("Can't understand command '" + args[0] + "'");
+ if (!args[0].startsWith("-")) {
+ System.err.println("Command names must start with dashes.");
+ }
+ printUsage(false);
+ return 1;
+ }
+ List<String> argsList = new LinkedList<String>();
+ for (int j = 1; j < args.length; j++) {
+ argsList.add(args[j]);
+ }
+ return command.run(getConf(), argsList);
+ }
+
+ public static void main(String[] argsArray) throws IOException {
+ CacheAdmin cacheAdmin = new CacheAdmin(new Configuration());
+ System.exit(cacheAdmin.run(argsArray));
+ }
- private static DistributedFileSystem getDFS() throws IOException {
+ private static DistributedFileSystem getDFS(Configuration conf)
+ throws IOException {
FileSystem fs = FileSystem.get(conf);
if (!(fs instanceof DistributedFileSystem)) {
throw new IllegalArgumentException("FileSystem " + fs.getUri() +
@@ -47,37 +99,55 @@ public class CacheAdmin {
return (DistributedFileSystem)fs;
}
+ /**
+ * NN exceptions contain the stack trace as part of the exception message.
+ * When it's a known error, pretty-print the error and squish the stack trace.
+ */
+ private static String prettifyException(Exception e) {
+ return e.getClass().getSimpleName() + ": "
+ + e.getLocalizedMessage().split("\n")[0];
+ }
+
+ private static TableListing getOptionDescriptionListing() {
+ TableListing listing = new TableListing.Builder()
+ .addField("").addField("", true)
+ .wrapWidth(MAX_LINE_WIDTH).hideHeaders().build();
+ return listing;
+ }
+
interface Command {
String getName();
String getShortUsage();
String getLongUsage();
- int run(List<String> args) throws IOException;
+ int run(Configuration conf, List<String> args) throws IOException;
}
private static class AddPathBasedCacheDirectiveCommand implements Command {
@Override
public String getName() {
- return "-addPath";
+ return "-addDirective";
}
@Override
public String getShortUsage() {
- return "[-addPath -path <path> -pool <pool-name>]\n";
+ return "[" + getName() + " -path <path> -pool <pool-name>]\n";
}
@Override
public String getLongUsage() {
- return getShortUsage() +
- "Adds a new PathBasedCache directive.\n" +
- "<path> The new path to cache.\n" +
- " Paths may be either directories or files.\n" +
- "<pool-name> The pool which this directive will reside in.\n" +
- " You must have write permission on the cache pool in order\n" +
- " to add new entries to it.\n";
+ TableListing listing = getOptionDescriptionListing();
+ listing.addRow("<path>", "A path to cache. The path can be " +
+ "a directory or a file.");
+ listing.addRow("<pool-name>", "The pool to which the directive will be " +
+ "added. You must have write permission on the cache pool "
+ + "in order to add new directives.");
+ return getShortUsage() + "\n" +
+ "Add a new PathBasedCache directive.\n\n" +
+ listing.toString();
}
@Override
- public int run(List<String> args) throws IOException {
+ public int run(Configuration conf, List<String> args) throws IOException {
String path = StringUtils.popOptionWithArgument("-path", args);
if (path == null) {
System.err.println("You must specify a path with -path.");
@@ -93,14 +163,20 @@ public class CacheAdmin {
return 1;
}
- DistributedFileSystem dfs = getDFS();
+ DistributedFileSystem dfs = getDFS(conf);
PathBasedCacheDirective directive =
new PathBasedCacheDirective(path, poolName);
- PathBasedCacheDescriptor descriptor =
- dfs.addPathBasedCacheDirective(directive);
- System.out.println("Added PathBasedCache entry "
- + descriptor.getEntryId());
+ try {
+ PathBasedCacheDescriptor descriptor =
+ dfs.addPathBasedCacheDirective(directive);
+ System.out.println("Added PathBasedCache entry "
+ + descriptor.getEntryId());
+ } catch (AddPathBasedCacheDirectiveException e) {
+ System.err.println(prettifyException(e));
+ return 2;
+ }
+
return 0;
}
}
@@ -108,32 +184,41 @@ public class CacheAdmin {
private static class RemovePathBasedCacheDirectiveCommand implements Command {
@Override
public String getName() {
- return "-removePath";
+ return "-removeDirective";
}
@Override
public String getShortUsage() {
- return "[-removePath <id>]\n";
+ return "[" + getName() + " <id>]\n";
}
@Override
public String getLongUsage() {
- return getShortUsage() +
- "Remove a cache directive.\n" +
- "<id> The id of the cache directive to remove.\n" +
- " You must have write permission on the pool where the\n" +
- " directive resides in order to remove it. To see a list\n" +
- " of PathBasedCache directive IDs, use the -list command.\n";
+ TableListing listing = getOptionDescriptionListing();
+ listing.addRow("<id>", "The id of the cache directive to remove. " +
+ "You must have write permission on the pool of the " +
+ "directive in order to remove it. To see a list " +
+ "of PathBasedCache directive IDs, use the -list command.");
+ return getShortUsage() + "\n" +
+ "Remove a cache directive.\n\n" +
+ listing.toString();
}
@Override
- public int run(List<String> args) throws IOException {
+ public int run(Configuration conf, List<String> args) throws IOException {
String idString= StringUtils.popFirstNonOption(args);
if (idString == null) {
System.err.println("You must specify a directive ID to remove.");
return 1;
}
- long id = Long.valueOf(idString);
+ long id;
+ try {
+ id = Long.valueOf(idString);
+ } catch (NumberFormatException e) {
+ System.err.println("Invalid directive ID " + idString + ": expected " +
+ "a numeric value.");
+ return 1;
+ }
if (id <= 0) {
System.err.println("Invalid directive ID " + id + ": ids must " +
"be greater than 0.");
@@ -141,12 +226,17 @@ public class CacheAdmin {
}
if (!args.isEmpty()) {
System.err.println("Can't understand argument: " + args.get(0));
+ System.err.println("Usage is " + getShortUsage());
return 1;
}
- DistributedFileSystem dfs = getDFS();
- dfs.removePathBasedCacheDescriptor(new PathBasedCacheDescriptor(id, null,
- null));
- System.out.println("Removed PathBasedCache directive " + id);
+ DistributedFileSystem dfs = getDFS(conf);
+ try {
+ dfs.getClient().removePathBasedCacheDescriptor(id);
+ System.out.println("Removed PathBasedCache directive " + id);
+ } catch (RemovePathBasedCacheDescriptorException e) {
+ System.err.println(prettifyException(e));
+ return 2;
+ }
return 0;
}
}
@@ -154,31 +244,30 @@ public class CacheAdmin {
private static class ListPathBasedCacheDirectiveCommand implements Command {
@Override
public String getName() {
- return "-listPaths";
+ return "-listDirectives";
}
@Override
public String getShortUsage() {
- return "[-listPaths [-path <path>] [-pool <pool-name>]]\n";
+ return "[" + getName() + " [-path <path>] [-pool <pool>]]\n";
}
@Override
public String getLongUsage() {
- return getShortUsage() +
- "List PathBasedCache directives.\n" +
- "<path> If a -path argument is given, we will list only\n" +
- " PathBasedCache entries with this path.\n" +
- " Note that if there is a PathBasedCache directive for <path>\n" +
- " in a cache pool that we don't have read access for, it\n" +
- " not be listed. If there are unreadable cache pools, a\n" +
- " message will be printed.\n" +
- " may be incomplete.\n" +
- "<pool-name> If a -pool argument is given, we will list only path\n" +
- " cache entries in that pool.\n";
+ TableListing listing = getOptionDescriptionListing();
+ listing.addRow("<path>", "List only " +
+ "PathBasedCache directives with this path. " +
+ "Note that if there is a PathBasedCache directive for <path> " +
+ "in a cache pool that we don't have read access for, it " +
+ "will not be listed.");
+ listing.addRow("<pool>", "List only path cache directives in that pool.");
+ return getShortUsage() + "\n" +
+ "List PathBasedCache directives.\n\n" +
+ listing.toString();
}
@Override
- public int run(List<String> args) throws IOException {
+ public int run(Configuration conf, List<String> args) throws IOException {
String pathFilter = StringUtils.popOptionWithArgument("-path", args);
String poolFilter = StringUtils.popOptionWithArgument("-pool", args);
if (!args.isEmpty()) {
@@ -186,11 +275,11 @@ public class CacheAdmin {
return 1;
}
TableListing tableListing = new TableListing.Builder().
- addField("ID", Justification.RIGHT).
+ addField("ID", Justification.LEFT).
addField("POOL", Justification.LEFT).
addField("PATH", Justification.LEFT).
build();
- DistributedFileSystem dfs = getDFS();
+ DistributedFileSystem dfs = getDFS(conf);
RemoteIterator<PathBasedCacheDescriptor> iter =
dfs.listPathBasedCacheDescriptors(poolFilter, pathFilter);
int numEntries = 0;
@@ -205,12 +294,325 @@ public class CacheAdmin {
System.out.print(String.format("Found %d entr%s\n",
numEntries, numEntries == 1 ? "y" : "ies"));
if (numEntries > 0) {
- System.out.print(tableListing.build());
+ System.out.print(tableListing);
+ }
+ return 0;
+ }
+ }
+
+ private static class AddCachePoolCommand implements Command {
+
+ private static final String NAME = "-addPool";
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+
+ @Override
+ public String getShortUsage() {
+ return "[" + NAME + " <name> [-owner <owner>] " +
+ "[-group <group>] [-mode <mode>] [-weight <weight>]]\n";
+ }
+
+ @Override
+ public String getLongUsage() {
+ TableListing listing = getOptionDescriptionListing();
+
+ listing.addRow("<name>", "Name of the new pool.");
+ listing.addRow("<owner>", "Username of the owner of the pool. " +
+ "Defaults to the current user.");
+ listing.addRow("<group>", "Group of the pool. " +
+ "Defaults to the primary group name of the current user.");
+ listing.addRow("<mode>", "UNIX-style permissions for the pool. " +
+ "Permissions are specified in octal, e.g. 0755. " +
+ "By default, this is set to " + String.format("0%03o",
+ FsPermission.getCachePoolDefault().toShort()));
+ listing.addRow("<weight>", "Weight of the pool. " +
+ "This is a relative measure of the importance of the pool used " +
+ "during cache resource management. By default, it is set to " +
+ CachePool.DEFAULT_WEIGHT);
+
+ return getShortUsage() + "\n" +
+ "Add a new cache pool.\n\n" +
+ listing.toString();
+ }
+
+ @Override
+ public int run(Configuration conf, List<String> args) throws IOException {
+ String owner = StringUtils.popOptionWithArgument("-owner", args);
+ if (owner == null) {
+ owner = UserGroupInformation.getCurrentUser().getShortUserName();
+ }
+ String group = StringUtils.popOptionWithArgument("-group", args);
+ if (group == null) {
+ group = UserGroupInformation.getCurrentUser().getGroupNames()[0];
+ }
+ String modeString = StringUtils.popOptionWithArgument("-mode", args);
+ int mode;
+ if (modeString == null) {
+ mode = FsPermission.getCachePoolDefault().toShort();
+ } else {
+ mode = Integer.parseInt(modeString, 8);
+ }
+ String weightString = StringUtils.popOptionWithArgument("-weight", args);
+ int weight;
+ if (weightString == null) {
+ weight = CachePool.DEFAULT_WEIGHT;
+ } else {
+ weight = Integer.parseInt(weightString);
+ }
+ String name = StringUtils.popFirstNonOption(args);
+ if (name == null) {
+ System.err.println("You must specify a name when creating a " +
+ "cache pool.");
+ return 1;
+ }
+ if (!args.isEmpty()) {
+ System.err.print("Can't understand arguments: " +
+ Joiner.on(" ").join(args) + "\n");
+ System.err.println("Usage is " + getShortUsage());
+ return 1;
+ }
+ DistributedFileSystem dfs = getDFS(conf);
+ CachePoolInfo info = new CachePoolInfo(name).
+ setOwnerName(owner).
+ setGroupName(group).
+ setMode(new FsPermission((short)mode)).
+ setWeight(weight);
+ try {
+ dfs.addCachePool(info);
+ } catch (IOException e) {
+ throw new RemoteException(e.getClass().getName(), e.getMessage());
+ }
+ System.out.println("Successfully added cache pool " + name + ".");
+ return 0;
+ }
+ }
+
+ private static class ModifyCachePoolCommand implements Command {
+
+ @Override
+ public String getName() {
+ return "-modifyPool";
+ }
+
+ @Override
+ public String getShortUsage() {
+ return "[" + getName() + " <name> [-owner <owner>] " +
+ "[-group <group>] [-mode <mode>] [-weight <weight>]]\n";
+ }
+
+ @Override
+ public String getLongUsage() {
+ TableListing listing = getOptionDescriptionListing();
+
+ listing.addRow("<name>", "Name of the pool to modify.");
+ listing.addRow("<owner>", "Username of the owner of the pool");
+ listing.addRow("<group>", "Groupname of the group of the pool.");
+ listing.addRow("<mode>", "Unix-style permissions of the pool in octal.");
+ listing.addRow("<weight>", "Weight of the pool.");
+
+ return getShortUsage() + "\n" +
+ WordUtils.wrap("Modifies the metadata of an existing cache pool. " +
+ "See usage of " + AddCachePoolCommand.NAME + " for more details",
+ MAX_LINE_WIDTH) + "\n\n" +
+ listing.toString();
+ }
+
+ @Override
+ public int run(Configuration conf, List<String> args) throws IOException {
+ String owner = StringUtils.popOptionWithArgument("-owner", args);
+ String group = StringUtils.popOptionWithArgument("-group", args);
+ String modeString = StringUtils.popOptionWithArgument("-mode", args);
+ Integer mode = (modeString == null) ?
+ null : Integer.parseInt(modeString, 8);
+ String weightString = StringUtils.popOptionWithArgument("-weight", args);
+ Integer weight = (weightString == null) ?
+ null : Integer.parseInt(weightString);
+ String name = StringUtils.popFirstNonOption(args);
+ if (name == null) {
+ System.err.println("You must specify a name when creating a " +
+ "cache pool.");
+ return 1;
+ }
+ if (!args.isEmpty()) {
+ System.err.print("Can't understand arguments: " +
+ Joiner.on(" ").join(args) + "\n");
+ System.err.println("Usage is " + getShortUsage());
+ return 1;
+ }
+ boolean changed = false;
+ CachePoolInfo info = new CachePoolInfo(name);
+ if (owner != null) {
+ info.setOwnerName(owner);
+ changed = true;
+ }
+ if (group != null) {
+ info.setGroupName(group);
+ changed = true;
+ }
+ if (mode != null) {
+ info.setMode(new FsPermission(mode.shortValue()));
+ changed = true;
+ }
+ if (weight != null) {
+ info.setWeight(weight);
+ changed = true;
+ }
+ if (!changed) {
+ System.err.println("You must specify at least one attribute to " +
+ "change in the cache pool.");
+ return 1;
+ }
+ DistributedFileSystem dfs = getDFS(conf);
+ try {
+ dfs.modifyCachePool(info);
+ } catch (IOException e) {
+ throw new RemoteException(e.getClass().getName(), e.getMessage());
+ }
+ System.out.print("Successfully modified cache pool " + name);
+ String prefix = " to have ";
+ if (owner != null) {
+ System.out.print(prefix + "owner name " + owner);
+ prefix = " and ";
+ }
+ if (group != null) {
+ System.out.print(prefix + "group name " + group);
+ prefix = " and ";
+ }
+ if (mode != null) {
+ System.out.print(prefix + "mode " + new FsPermission(mode.shortValue()));
+ prefix = " and ";
+ }
+ if (weight != null) {
+ System.out.print(prefix + "weight " + weight);
+ prefix = " and ";
+ }
+ System.out.print("\n");
+ return 0;
+ }
+ }
+
+ private static class RemoveCachePoolCommand implements Command {
+
+ @Override
+ public String getName() {
+ return "-removePool";
+ }
+
+ @Override
+ public String getShortUsage() {
+ return "[" + getName() + " <name>]\n";
+ }
+
+ @Override
+ public String getLongUsage() {
+ return getShortUsage() + "\n" +
+ WordUtils.wrap("Remove a cache pool. This also uncaches paths " +
+ "associated with the pool.\n\n", MAX_LINE_WIDTH) +
+ "<name> Name of the cache pool to remove.\n";
+ }
+
+ @Override
+ public int run(Configuration conf, List<String> args) throws IOException {
+ String name = StringUtils.popFirstNonOption(args);
+ if (name == null) {
+ System.err.println("You must specify a name when deleting a " +
+ "cache pool.");
+ return 1;
+ }
+ if (!args.isEmpty()) {
+ System.err.print("Can't understand arguments: " +
+ Joiner.on(" ").join(args) + "\n");
+ System.err.println("Usage is " + getShortUsage());
+ return 1;
}
+ DistributedFileSystem dfs = getDFS(conf);
+ try {
+ dfs.removeCachePool(name);
+ } catch (IOException e) {
+ throw new RemoteException(e.getClass().getName(), e.getMessage());
+ }
+ System.out.println("Successfully removed cache pool " + name + ".");
return 0;
}
}
+ private static class ListCachePoolsCommand implements Command {
+
+ @Override
+ public String getName() {
+ return "-listPools";
+ }
+
+ @Override
+ public String getShortUsage() {
+ return "[" + getName() + " [name]]\n";
+ }
+
+ @Override
+ public String getLongUsage() {
+ TableListing listing = getOptionDescriptionListing();
+ listing.addRow("[name]", "If specified, list only the named cache pool.");
+
+ return getShortUsage() + "\n" +
+ WordUtils.wrap("Display information about one or more cache pools, " +
+ "e.g. name, owner, group, permissions, etc.", MAX_LINE_WIDTH) +
+ "\n\n" +
+ listing.toString();
+ }
+
+ @Override
+ public int run(Configuration conf, List<String> args) throws IOException {
+ String name = StringUtils.popFirstNonOption(args);
+ if (!args.isEmpty()) {
+ System.err.print("Can't understand arguments: " +
+ Joiner.on(" ").join(args) + "\n");
+ System.err.println("Usage is " + getShortUsage());
+ return 1;
+ }
+ DistributedFileSystem dfs = getDFS(conf);
+ TableListing listing = new TableListing.Builder().
+ addField("NAME", Justification.LEFT).
+ addField("OWNER", Justification.LEFT).
+ addField("GROUP", Justification.LEFT).
+ addField("MODE", Justification.LEFT).
+ addField("WEIGHT", Justification.LEFT).
+ build();
+ int numResults = 0;
+ try {
+ RemoteIterator<CachePoolInfo> iter = dfs.listCachePools();
+ while (iter.hasNext()) {
+ CachePoolInfo info = iter.next();
+ if (name == null || info.getPoolName().equals(name)) {
+ listing.addRow(new String[] {
+ info.getPoolName(),
+ info.getOwnerName(),
+ info.getGroupName(),
+ info.getMode().toString(),
+ info.getWeight().toString(),
+ });
+ ++numResults;
+ if (name != null) {
+ break;
+ }
+ }
+ }
+ } catch (IOException e) {
+ throw new RemoteException(e.getClass().getName(), e.getMessage());
+ }
+ System.out.print(String.format("Found %d result%s.\n", numResults,
+ (numResults == 1 ? "" : "s")));
+ if (numResults > 0) {
+ System.out.print(listing);
+ }
+ // If there are no results, we return 1 (failure exit code);
+ // otherwise we return 0 (success exit code).
+ return (numResults == 0) ? 1 : 0;
+ }
+ }
+
private static class HelpCommand implements Command {
@Override
public String getName() {
@@ -224,15 +626,17 @@ public class CacheAdmin {
@Override
public String getLongUsage() {
- return getShortUsage() +
- "Get detailed help about a command.\n" +
- "<command-name> The command to get detailed help for. If no " +
- " command-name is specified, we will print detailed help " +
- " about all commands";
+ TableListing listing = getOptionDescriptionListing();
+ listing.addRow("<command-name>", "The command for which to get " +
+ "detailed help. If no command is specified, print detailed help for " +
+ "all commands");
+ return getShortUsage() + "\n" +
+ "Get detailed help about a command.\n\n" +
+ listing.toString();
}
@Override
- public int run(List<String> args) throws IOException {
+ public int run(Configuration conf, List<String> args) throws IOException {
if (args.size() == 0) {
for (Command command : COMMANDS) {
System.err.println(command.getLongUsage());
@@ -255,6 +659,7 @@ public class CacheAdmin {
System.err.print(separator + c.getName());
separator = ", ";
}
+ System.err.print("\n");
return 1;
}
System.err.print(command.getLongUsage());
@@ -266,6 +671,10 @@ public class CacheAdmin {
new AddPathBasedCacheDirectiveCommand(),
new RemovePathBasedCacheDirectiveCommand(),
new ListPathBasedCacheDirectiveCommand(),
+ new AddCachePoolCommand(),
+ new ModifyCachePoolCommand(),
+ new RemoveCachePoolCommand(),
+ new ListCachePoolsCommand(),
new HelpCommand(),
};
@@ -290,25 +699,4 @@ public class CacheAdmin {
}
return null;
}
-
- public static void main(String[] argsArray) throws IOException {
- if (argsArray.length == 0) {
- printUsage(false);
- System.exit(1);
- }
- Command command = determineCommand(argsArray[0]);
- if (command == null) {
- System.err.println("Can't understand command '" + argsArray[0] + "'");
- if (!argsArray[0].startsWith("-")) {
- System.err.println("Command names must start with dashes.");
- }
- printUsage(false);
- System.exit(1);
- }
- List<String> args = new LinkedList<String>();
- for (int j = 1; j < argsArray.length; j++) {
- args.add(argsArray[j]);
- }
- System.exit(command.run(args));
- }
}
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java Fri Oct 4 22:28:23 2013
@@ -24,7 +24,6 @@ import java.security.PrivilegedException
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.TreeSet;
@@ -37,8 +36,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.shell.Command;
import org.apache.hadoop.fs.shell.CommandFormat;
import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -47,17 +44,14 @@ import org.apache.hadoop.hdfs.Distribute
import org.apache.hadoop.hdfs.HAUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.NameNodeProxies;
-import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.SnapshotException;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
-import org.apache.hadoop.hdfs.server.namenode.CachePool;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.TransferFsImage;
-import org.apache.hadoop.hdfs.tools.TableListing.Justification;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
@@ -68,8 +62,6 @@ import org.apache.hadoop.security.author
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
-import com.google.common.base.Joiner;
-
/**
* This class provides some DFS administrative access shell commands.
*/
@@ -463,230 +455,6 @@ public class DFSAdmin extends FsShell {
return exitCode;
}
- final private static String ADD_CACHE_POOL_USAGE =
- "-addCachePool <name> [-owner <owner>] " +
- "[-group <group>] [-mode <mode>] [-weight <weight>]";
-
- public int addCachePool(String argsArray[], int idx) throws IOException {
- List<String> args= new LinkedList<String>();
- for (int i = idx; i < argsArray.length; i++) {
- args.add(argsArray[i]);
- }
- String owner = StringUtils.popOptionWithArgument("-owner", args);
- if (owner == null) {
- owner = UserGroupInformation.getCurrentUser().getShortUserName();
- }
- String group = StringUtils.popOptionWithArgument("-group", args);
- if (group == null) {
- group = UserGroupInformation.getCurrentUser().getGroupNames()[0];
- }
- String modeString = StringUtils.popOptionWithArgument("-mode", args);
- int mode;
- if (modeString == null) {
- mode = FsPermission.getCachePoolDefault().toShort();
- } else {
- mode = Integer.parseInt(modeString, 8);
- }
- String weightString = StringUtils.popOptionWithArgument("-weight", args);
- int weight;
- if (weightString == null) {
- weight = CachePool.DEFAULT_WEIGHT;
- } else {
- weight = Integer.parseInt(weightString);
- }
- String name = StringUtils.popFirstNonOption(args);
- if (name == null) {
- System.err.println("You must specify a name when creating a " +
- "cache pool.");
- return 1;
- }
- if (!args.isEmpty()) {
- System.err.print("Can't understand arguments: " +
- Joiner.on(" ").join(args) + "\n");
- System.err.println("Usage is " + ADD_CACHE_POOL_USAGE);
- return 1;
- }
- DistributedFileSystem dfs = getDFS();
- CachePoolInfo info = new CachePoolInfo(name).
- setOwnerName(owner).
- setGroupName(group).
- setMode(new FsPermission((short)mode)).
- setWeight(weight);
- try {
- dfs.addCachePool(info);
- } catch (IOException e) {
- throw new RemoteException(e.getClass().getName(), e.getMessage());
- }
- System.out.println("Successfully added cache pool " + name + ".");
- return 0;
- }
-
- final private static String MODIFY_CACHE_POOL_USAGE =
- "-modifyCachePool <name> [-owner <owner>] " +
- "[-group <group>] [-mode <mode>] [-weight <weight>]";
-
- public int modifyCachePool(String argsArray[], int idx) throws IOException {
- List<String> args = new LinkedList<String>();
- for (int i = idx; i < argsArray.length; i++) {
- args.add(argsArray[i]);
- }
- String owner = StringUtils.popOptionWithArgument("-owner", args);
- String group = StringUtils.popOptionWithArgument("-group", args);
- String modeString = StringUtils.popOptionWithArgument("-mode", args);
- Integer mode = (modeString == null) ?
- null : Integer.parseInt(modeString, 8);
- String weightString = StringUtils.popOptionWithArgument("-weight", args);
- Integer weight = (weightString == null) ?
- null : Integer.parseInt(weightString);
- String name = StringUtils.popFirstNonOption(args);
- if (name == null) {
- System.err.println("You must specify a name when creating a " +
- "cache pool.");
- return 1;
- }
- if (!args.isEmpty()) {
- System.err.print("Can't understand arguments: " +
- Joiner.on(" ").join(args) + "\n");
- System.err.println("usage is " + MODIFY_CACHE_POOL_USAGE);
- return 1;
- }
- boolean changed = false;
- CachePoolInfo info = new CachePoolInfo(name);
- if (owner != null) {
- info.setOwnerName(owner);
- changed = true;
- }
- if (group != null) {
- info.setGroupName(group);
- changed = true;
- }
- if (mode != null) {
- info.setMode(new FsPermission(mode.shortValue()));
- changed = true;
- }
- if (weight != null) {
- info.setWeight(weight);
- changed = true;
- }
- if (!changed) {
- System.err.println("You must specify at least one attribute to " +
- "change in the cache pool.");
- return 1;
- }
- DistributedFileSystem dfs = getDFS();
- try {
- dfs.modifyCachePool(info);
- } catch (IOException e) {
- throw new RemoteException(e.getClass().getName(), e.getMessage());
- }
- System.out.print("Successfully modified cache pool " + name);
- String prefix = " to have ";
- if (owner != null) {
- System.out.print(prefix + "owner name " + owner);
- prefix = "and ";
- }
- if (group != null) {
- System.out.print(prefix + "group name " + group);
- prefix = "and ";
- }
- if (mode != null) {
- System.out.print(prefix + "mode " + new FsPermission(mode.shortValue()));
- prefix = "and ";
- }
- if (weight != null) {
- System.out.print(prefix + "weight " + weight);
- prefix = "and ";
- }
- System.out.print("\n");
- return 0;
- }
-
- final private static String REMOVE_CACHE_POOL_USAGE =
- "-removeCachePool <name>";
-
- public int removeCachePool(String argsArray[], int idx) throws IOException {
- List<String> args = new LinkedList<String>();
- for (int i = idx; i < argsArray.length; i++) {
- args.add(argsArray[i]);
- }
- String name = StringUtils.popFirstNonOption(args);
- if (name == null) {
- System.err.println("You must specify a name when deleting a " +
- "cache pool.");
- return 1;
- }
- if (!args.isEmpty()) {
- System.err.print("Can't understand arguments: " +
- Joiner.on(" ").join(args) + "\n");
- System.err.println("Usage is " + REMOVE_CACHE_POOL_USAGE);
- return 1;
- }
- DistributedFileSystem dfs = getDFS();
- try {
- dfs.removeCachePool(name);
- } catch (IOException e) {
- dfs.removeCachePool(name);
- throw new RemoteException(e.getClass().getName(), e.getMessage());
- }
- System.out.println("Successfully removed cache pool " + name + ".");
- return 0;
- }
-
- final private static String LIST_CACHE_POOLS_USAGE =
- "-listCachePools] [-verbose] [name]";
-
- public int listCachePools(String argsArray[], int idx) throws IOException {
- List<String> args = new LinkedList<String>();
- for (int i = idx; i < argsArray.length; i++) {
- args.add(argsArray[i]);
- }
- String name = StringUtils.popFirstNonOption(args);
- if (!args.isEmpty()) {
- System.err.print("Can't understand arguments: " +
- Joiner.on(" ").join(args) + "\n");
- System.err.println("usage is " + LIST_CACHE_POOLS_USAGE);
- return 1;
- }
- DistributedFileSystem dfs = getDFS();
- TableListing listing = new TableListing.Builder().
- addField("NAME", Justification.LEFT).
- addField("OWNER", Justification.LEFT).
- addField("GROUP", Justification.LEFT).
- addField("MODE", Justification.LEFT).
- addField("WEIGHT", Justification.RIGHT).
- build();
- int numResults = 0;
- try {
- RemoteIterator<CachePoolInfo> iter = dfs.listCachePools();
- while (iter.hasNext()) {
- CachePoolInfo info = iter.next();
- if (name == null || info.getPoolName().equals(name)) {
- listing.addRow(new String[] {
- info.getPoolName(),
- info.getOwnerName(),
- info.getGroupName(),
- info.getMode().toString(),
- info.getWeight().toString(),
- });
- ++numResults;
- if (name != null) {
- break;
- }
- }
- }
- } catch (IOException e) {
- throw new RemoteException(e.getClass().getName(), e.getMessage());
- }
- System.out.print(String.format("Found %d result%s.\n", numResults,
- (numResults == 1 ? "" : "s")));
- if (numResults > 0) {
- System.out.print(listing.build());
- }
- // If there are no results, we return 1 (failure exit code);
- // otherwise we return 0 (success exit code).
- return (numResults == 0) ? 1 : 0;
- }
-
public int rollEdits() throws IOException {
DistributedFileSystem dfs = getDFS();
long txid = dfs.rollEdits();
@@ -814,10 +582,6 @@ public class DFSAdmin extends FsShell {
"\t[-fetchImage <local directory>]\n" +
"\t[-allowSnapshot <snapshotDir>]\n" +
"\t[-disallowSnapshot <snapshotDir>]\n" +
- "\t[" + ADD_CACHE_POOL_USAGE + "]\n" +
- "\t[" + MODIFY_CACHE_POOL_USAGE + "]\n" +
- "\t[" + REMOVE_CACHE_POOL_USAGE + "]\n" +
- "\t[" + LIST_CACHE_POOLS_USAGE + "]\n" +
"\t[-help [cmd]]\n";
String report ="-report: \tReports basic filesystem information and statistics.\n";
@@ -915,42 +679,6 @@ public class DFSAdmin extends FsShell {
String disallowSnapshot = "-disallowSnapshot <snapshotDir>:\n" +
"\tDo not allow snapshots to be taken on a directory any more.\n";
- String addCachePool = ADD_CACHE_POOL_USAGE + ": \n" +
- "\tAdd a new cache pool.\n" +
- "\t<name> is the name of the new pool. It must not already be used.\n" +
- "\t<owner> is the owner of the pool. It defaults to the current\n" +
- "\tuser name.\n" +
- "\t<group> is the group of the pool. It defaults to the primary\n" +
- "\tgroup name of the current user.\n" +
- "\t<mode> is the mode of the pool. This is a UNIX-style numeric mode\n" +
- "\targument, supplied as an octal number. For example, mode 0755\n" +
- "\tgrants the owner all permissions, and grants everyone else\n" +
- "\tonly read and list permissions.\n" +
- "\tThe mode defaults to " +
- String.format("0%03o",
- FsPermission.getCachePoolDefault().toShort()) + "\n" +
- "\t<weight> is the weight of the pool. This determines what share \n" +
- "\tof cluster resources the pool will get. It defaults to " +
- CachePool.DEFAULT_WEIGHT + "\n";
-
- String modifyCachePool = MODIFY_CACHE_POOL_USAGE + ": \n" +
- "\tAdd a new cache pool with the given name.\n" +
- "\t<name> is the name of the pool to modify.\n" +
- "\t<owner> is the new owner of the pool.\n" +
- "\t<group> is the new group of the pool.\n" +
- "\t<mode> is the new mode of the pool.\n" +
- "\t<weight> is the new weight of the pool.\n";
-
- String removeCachePool = REMOVE_CACHE_POOL_USAGE + ": \n" +
- "\tRemove a cache pool.\n" +
- "\t<name> is the name of the pool to remove.\n";
-
- String listCachePools = " -listCachePools [-name <name>] [-verbose]\n" +
- "\tList cache pools.\n" +
- "\tIf <name> is specified, we will list only the cache pool with\n" +
- "\tthat name. If <verbose> is specified, we will list detailed\n" +
- "\tinformation about each pool\n";
-
String help = "-help [cmd]: \tDisplays help for the given command or all commands if none\n" +
"\t\tis specified.\n";
@@ -998,14 +726,6 @@ public class DFSAdmin extends FsShell {
System.out.println(allowSnapshot);
} else if ("disallowSnapshot".equalsIgnoreCase(cmd)) {
System.out.println(disallowSnapshot);
- } else if ("addCachePool".equalsIgnoreCase(cmd)) {
- System.out.println(addCachePool);
- } else if ("modifyCachePool".equalsIgnoreCase(cmd)) {
- System.out.println(modifyCachePool);
- } else if ("removeCachePool".equalsIgnoreCase(cmd)) {
- System.out.println(removeCachePool);
- } else if ("listCachePools".equalsIgnoreCase(cmd)) {
- System.out.println(listCachePools);
} else if ("help".equals(cmd)) {
System.out.println(help);
} else {
@@ -1032,13 +752,6 @@ public class DFSAdmin extends FsShell {
System.out.println(fetchImage);
System.out.println(allowSnapshot);
System.out.println(disallowSnapshot);
- System.out.println(addCachePool);
- System.out.println(modifyCachePool);
- System.out.println(removeCachePool);
- System.out.println(listCachePools);
-
- System.out.println(disallowSnapshot);
-
System.out.println(help);
System.out.println();
ToolRunner.printGenericCommandUsage(System.out);
@@ -1275,18 +988,6 @@ public class DFSAdmin extends FsShell {
} else if ("-fetchImage".equals(cmd)) {
System.err.println("Usage: java DFSAdmin"
+ " [-fetchImage <local directory>]");
- } else if ("-addCachePool".equals(cmd)) {
- System.err.println("Usage: java DFSAdmin"
- + " [" + ADD_CACHE_POOL_USAGE + "]");
- } else if ("-modifyCachePool".equals(cmd)) {
- System.err.println("Usage: java DFSAdmin"
- + " [" + MODIFY_CACHE_POOL_USAGE + "]");
- } else if ("-removeCachePool".equals(cmd)) {
- System.err.println("Usage: java DFSAdmin"
- + " [" + REMOVE_CACHE_POOL_USAGE + "]");
- } else if ("-listCachePools".equals(cmd)) {
- System.err.println("Usage: java DFSAdmin"
- + " [" + LIST_CACHE_POOLS_USAGE + "]");
} else {
System.err.println("Usage: java DFSAdmin");
System.err.println("Note: Administrative commands can only be run as the HDFS superuser.");
@@ -1312,10 +1013,6 @@ public class DFSAdmin extends FsShell {
System.err.println(" ["+ClearSpaceQuotaCommand.USAGE+"]");
System.err.println(" [-setBalancerBandwidth <bandwidth in bytes per second>]");
System.err.println(" [-fetchImage <local directory>]");
- System.err.println(" [" + ADD_CACHE_POOL_USAGE + "]");
- System.err.println(" [" + MODIFY_CACHE_POOL_USAGE + "]");
- System.err.println(" [" + REMOVE_CACHE_POOL_USAGE + "]");
- System.err.println(" [" + LIST_CACHE_POOLS_USAGE + "]");
System.err.println(" [-help [cmd]]");
System.err.println();
ToolRunner.printGenericCommandUsage(System.err);
@@ -1488,14 +1185,6 @@ public class DFSAdmin extends FsShell {
exitCode = setBalancerBandwidth(argv, i);
} else if ("-fetchImage".equals(cmd)) {
exitCode = fetchImage(argv, i);
- } else if ("-addCachePool".equals(cmd)) {
- exitCode = addCachePool(argv, i);
- } else if ("-modifyCachePool".equals(cmd)) {
- exitCode = modifyCachePool(argv, i);
- } else if ("-removeCachePool".equals(cmd)) {
- exitCode = removeCachePool(argv, i);
- } else if ("-listCachePools".equals(cmd)) {
- exitCode = listCachePools(argv, i);
} else if ("-help".equals(cmd)) {
if (i < argv.length) {
printHelp(argv[i]);
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java Fri Oct 4 22:28:23 2013
@@ -17,13 +17,23 @@
*/
package org.apache.hadoop.hdfs.tools;
+import java.util.ArrayList;
import java.util.LinkedList;
import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.WordUtils;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* This class implements a "table listing" with column headers.
+ *
+ * Example:
+ *
+ * NAME OWNER GROUP MODE WEIGHT
+ * pool1 andrew andrew rwxr-xr-x 100
+ * pool2 andrew andrew rwxr-xr-x 100
+ * pool3 andrew andrew rwxr-xr-x 100
+ *
*/
@InterfaceAudience.Private
public class TableListing {
@@ -33,39 +43,80 @@ public class TableListing {
}
private static class Column {
- private final LinkedList<String> rows;
+ private final ArrayList<String> rows;
private final Justification justification;
- private int maxLength;
+ private final boolean wrap;
- Column(String title, Justification justification) {
- this.rows = new LinkedList<String>();
+ private int wrapWidth = Integer.MAX_VALUE;
+ private int maxWidth;
+
+ Column(String title, Justification justification, boolean wrap) {
+ this.rows = new ArrayList<String>();
this.justification = justification;
- this.maxLength = 0;
+ this.wrap = wrap;
+ this.maxWidth = 0;
addRow(title);
}
private void addRow(String val) {
- if ((val.length() + 1) > maxLength) {
- maxLength = val.length() + 1;
+ if ((val.length() + 1) > maxWidth) {
+ maxWidth = val.length() + 1;
+ }
+ // Ceiling at wrapWidth, because it'll get wrapped
+ if (maxWidth > wrapWidth) {
+ maxWidth = wrapWidth;
}
rows.add(val);
}
- String getRow(int i) {
- String raw = rows.get(i);
- int paddingLength = maxLength - raw.length();
- String padding = (paddingLength <= 0) ? "" :
- StringUtils.repeat(" ", paddingLength);
- if (justification == Justification.LEFT) {
- return raw + padding;
- } else {
- return padding + raw;
+ private int getMaxWidth() {
+ return maxWidth;
+ }
+
+ private void setWrapWidth(int width) {
+ wrapWidth = width;
+ // Ceiling the maxWidth at wrapWidth
+ if (maxWidth > wrapWidth) {
+ maxWidth = wrapWidth;
+ }
+ // Else we need to traverse through and find the real maxWidth
+ else {
+ maxWidth = 0;
+ for (int i=0; i<rows.size(); i++) {
+ int length = rows.get(i).length();
+ if (length > maxWidth) {
+ maxWidth = length;
+ }
+ }
}
}
+
+ /**
+ * Return the ith row of the column as a set of wrapped strings, each at
+ * most wrapWidth in length.
+ */
+ String[] getRow(int idx) {
+ String raw = rows.get(idx);
+ // Line-wrap if it's too long
+ String[] lines = new String[] {raw};
+ if (wrap) {
+ lines = WordUtils.wrap(lines[0], wrapWidth, "\n", true).split("\n");
+ }
+ for (int i=0; i<lines.length; i++) {
+ if (justification == Justification.LEFT) {
+ lines[i] = StringUtils.rightPad(lines[i], maxWidth);
+ } else if (justification == Justification.RIGHT) {
+ lines[i] = StringUtils.leftPad(lines[i], maxWidth);
+ }
+ }
+ return lines;
+ }
}
public static class Builder {
private final LinkedList<Column> columns = new LinkedList<Column>();
+ private boolean showHeader = true;
+ private int wrapWidth = Integer.MAX_VALUE;
/**
* Create a new Builder.
@@ -74,14 +125,63 @@ public class TableListing {
}
/**
- * Add a new field to the Table under construction.
- *
- * @param title Field title.
- * @param leftJustified Whether or not the field is left justified.
- * @return this.
+ * See {@link #addField(String, Justification, boolean)}
+ */
+ public Builder addField(String title) {
+ return addField(title, Justification.LEFT, false);
+ }
+
+ /**
+ * See {@link #addField(String, Justification, boolean)}
*/
public Builder addField(String title, Justification justification) {
- columns.add(new Column(title, justification));
+ return addField(title, justification, false);
+ }
+
+ /**
+ * See {@link #addField(String, Justification, boolean)}
+ */
+ public Builder addField(String title, boolean wrap) {
+ return addField(title, Justification.LEFT, wrap);
+ }
+
+ /**
+ * Add a new field to the Table under construction.
+ *
+ * @param title Field title.
+ * @param justification Right or left justification. Defaults to left.
+ * @param wrap Whether to auto-wrap the content of the cell when the
+ * table exceeds its maximum width. Defaults to false.
+ * @return This Builder object
+ */
+ public Builder addField(String title, Justification justification,
+ boolean wrap) {
+ columns.add(new Column(title, justification, wrap));
+ return this;
+ }
+
+ /**
+ * Hide column headers in the table output.
+ */
+ public Builder hideHeaders() {
+ this.showHeader = false;
+ return this;
+ }
+
+ /**
+ * Show column headers in the table output. This is the default.
+ */
+ public Builder showHeaders() {
+ this.showHeader = true;
+ return this;
+ }
+
+ /**
+ * Set the maximum width of a row in the TableListing. Must have one or
+ * more wrappable fields for this to take effect.
+ */
+ public Builder wrapWidth(int width) {
+ this.wrapWidth = width;
return this;
}
@@ -89,17 +189,22 @@ public class TableListing {
* Create a new TableListing.
*/
public TableListing build() {
- return new TableListing(columns.toArray(new Column[0]));
+ return new TableListing(columns.toArray(new Column[0]), showHeader,
+ wrapWidth);
}
}
private final Column columns[];
private int numRows;
+ private boolean showHeader;
+ private int wrapWidth;
- TableListing(Column columns[]) {
+ TableListing(Column columns[], boolean showHeader, int wrapWidth) {
this.columns = columns;
this.numRows = 0;
+ this.showHeader = showHeader;
+ this.wrapWidth = wrapWidth;
}
/**
@@ -107,7 +212,7 @@ public class TableListing {
*
* @param row The row of objects to add-- one per column.
*/
- public void addRow(String row[]) {
+ public void addRow(String... row) {
if (row.length != columns.length) {
throw new RuntimeException("trying to add a row with " + row.length +
" columns, but we have " + columns.length + " columns.");
@@ -118,19 +223,67 @@ public class TableListing {
numRows++;
}
- /**
- * Convert the table to a string.
- */
- public String build() {
+ @Override
+ public String toString() {
StringBuilder builder = new StringBuilder();
- for (int i = 0; i < numRows + 1; i++) {
- String prefix = "";
+ // Calculate the widths of each column based on their maxWidths and
+ // the wrapWidth for the entire table
+ int width = (columns.length-1)*2; // inter-column padding
+ for (int i=0; i<columns.length; i++) {
+ width += columns[i].maxWidth;
+ }
+ // Decrease the column size of wrappable columns until the goal width
+ // is reached, or we can't decrease anymore
+ while (width > wrapWidth) {
+ boolean modified = false;
+ for (int i=0; i<columns.length; i++) {
+ Column column = columns[i];
+ if (column.wrap) {
+ int maxWidth = column.getMaxWidth();
+ if (maxWidth > 4) {
+ column.setWrapWidth(maxWidth-1);
+ modified = true;
+ width -= 1;
+ if (width <= wrapWidth) {
+ break;
+ }
+ }
+ }
+ }
+ if (!modified) {
+ break;
+ }
+ }
+
+ int startrow = 0;
+ if (!showHeader) {
+ startrow = 1;
+ }
+ String[][] columnLines = new String[columns.length][];
+ for (int i = startrow; i < numRows + 1; i++) {
+ int maxColumnLines = 0;
for (int j = 0; j < columns.length; j++) {
- builder.append(prefix);
- prefix = " ";
- builder.append(columns[j].getRow(i));
+ columnLines[j] = columns[j].getRow(i);
+ if (columnLines[j].length > maxColumnLines) {
+ maxColumnLines = columnLines[j].length;
+ }
+ }
+
+ for (int c = 0; c < maxColumnLines; c++) {
+ // First column gets no left-padding
+ String prefix = "";
+ for (int j = 0; j < columns.length; j++) {
+ // Prepend padding
+ builder.append(prefix);
+ prefix = " ";
+ if (columnLines[j].length > c) {
+ builder.append(columnLines[j][c]);
+ } else {
+ builder.append(StringUtils.repeat(" ", columns[j].maxWidth));
+ }
+ }
+ builder.append("\n");
}
- builder.append("\n");
}
return builder.toString();
}
Added: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java?rev=1529334&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java (added)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java Fri Oct 4 22:28:23 2013
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.cli;
+
+import static org.junit.Assert.assertTrue;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.cli.util.CLICommand;
+import org.apache.hadoop.cli.util.CLICommandCacheAdmin;
+import org.apache.hadoop.cli.util.CLICommandTypes;
+import org.apache.hadoop.cli.util.CLITestCmd;
+import org.apache.hadoop.cli.util.CacheAdminCmdExecutor;
+import org.apache.hadoop.cli.util.CommandExecutor;
+import org.apache.hadoop.cli.util.CommandExecutor.Result;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.HDFSPolicyProvider;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.tools.CacheAdmin;
+import org.apache.hadoop.security.authorize.PolicyProvider;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.xml.sax.SAXException;
+
+public class TestCacheAdminCLI extends CLITestHelper {
+
+ public static final Log LOG = LogFactory.getLog(TestCacheAdminCLI.class);
+
+ protected MiniDFSCluster dfsCluster = null;
+ protected FileSystem fs = null;
+ protected String namenode = null;
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
+ HDFSPolicyProvider.class, PolicyProvider.class);
+
+ // Many of the tests expect a replication value of 1 in the output
+ conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
+
+ dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
+
+ dfsCluster.waitClusterUp();
+ namenode = conf.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///");
+ username = System.getProperty("user.name");
+
+ fs = dfsCluster.getFileSystem();
+ assertTrue("Not a HDFS: "+fs.getUri(),
+ fs instanceof DistributedFileSystem);
+ }
+
+ @After
+ @Override
+ public void tearDown() throws Exception {
+ if (fs != null) {
+ fs.close();
+ }
+ if (dfsCluster != null) {
+ dfsCluster.shutdown();
+ }
+ Thread.sleep(2000);
+ super.tearDown();
+ }
+
+ @Override
+ protected String getTestFile() {
+ return "testCacheAdminConf.xml";
+ }
+
+ @Override
+ protected TestConfigFileParser getConfigParser() {
+ return new TestConfigFileParserCacheAdmin();
+ }
+
+ private class TestConfigFileParserCacheAdmin extends
+ CLITestHelper.TestConfigFileParser {
+ @Override
+ public void endElement(String uri, String localName, String qName)
+ throws SAXException {
+ if (qName.equals("cache-admin-command")) {
+ if (testCommands != null) {
+ testCommands.add(new CLITestCmdCacheAdmin(charString,
+ new CLICommandCacheAdmin()));
+ } else if (cleanupCommands != null) {
+ cleanupCommands.add(new CLITestCmdCacheAdmin(charString,
+ new CLICommandCacheAdmin()));
+ }
+ } else {
+ super.endElement(uri, localName, qName);
+ }
+ }
+ }
+
+ private class CLITestCmdCacheAdmin extends CLITestCmd {
+
+ public CLITestCmdCacheAdmin(String str, CLICommandTypes type) {
+ super(str, type);
+ }
+
+ @Override
+ public CommandExecutor getExecutor(String tag)
+ throws IllegalArgumentException {
+ if (getType() instanceof CLICommandCacheAdmin) {
+ return new CacheAdminCmdExecutor(tag, new CacheAdmin(conf));
+ }
+ return super.getExecutor(tag);
+ }
+ }
+
+ @Override
+ protected Result execute(CLICommand cmd) throws Exception {
+ return cmd.getExecutor("").executeCommand(cmd.getCmd());
+ }
+
+ @Test
+ @Override
+ public void testAll () {
+ super.testAll();
+ }
+}
Propchange: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
------------------------------------------------------------------------------
svn:eol-style = native
Added: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java?rev=1529334&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java (added)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java Fri Oct 4 22:28:23 2013
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.cli.util;
+
+public class CLICommandCacheAdmin implements CLICommandTypes {
+}
Propchange: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java
------------------------------------------------------------------------------
svn:eol-style = native
Added: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java?rev=1529334&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java (added)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java Fri Oct 4 22:28:23 2013
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.cli.util;
+
+import org.apache.hadoop.hdfs.tools.CacheAdmin;
+import org.apache.hadoop.util.ToolRunner;
+
+public class CacheAdminCmdExecutor extends CommandExecutor {
+ protected String namenode = null;
+ protected CacheAdmin admin = null;
+
+ public CacheAdminCmdExecutor(String namenode, CacheAdmin admin) {
+ this.namenode = namenode;
+ this.admin = admin;
+ }
+
+ @Override
+ protected void execute(final String cmd) throws Exception {
+ String[] args = getCommandAsArgs(cmd, "NAMENODE", this.namenode);
+ ToolRunner.run(admin, args);
+ }
+}
Propchange: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java
------------------------------------------------------------------------------
svn:eol-style = native
Added: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml?rev=1529334&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml (added)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml Fri Oct 4 22:28:23 2013
@@ -0,0 +1,211 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="testConf.xsl"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<configuration>
+ <!-- Normal mode is test. To run just the commands and dump the output
+ to the log, set it to nocompare -->
+ <mode>test</mode>
+
+ <!-- Comparator types:
+ ExactComparator
+ SubstringComparator
+ RegexpComparator
+ TokenComparator
+ -->
+ <tests>
+
+ <test> <!--Tested -->
+ <description>Testing basic usage</description>
+ <test-commands>
+ <cache-admin-command></cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Usage: bin/hdfs cacheadmin [COMMAND]</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing listing no cache pools</description>
+ <test-commands>
+ <cache-admin-command>-listPools</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 0 results.</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing adding a cache pool</description>
+ <test-commands>
+ <cache-admin-command>-addPool foo</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool foo</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Successfully added cache pool foo.</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing modifying a cache pool</description>
+ <test-commands>
+ <cache-admin-command>-addPool poolparty -owner alice -group alicegroup -mode 0000 -weight 50</cache-admin-command>
+ <cache-admin-command>-modifyPool poolparty -owner bob -group bobgroup -mode 0777 -weight 51</cache-admin-command>
+ <cache-admin-command>-listPools</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool poolparty</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>poolparty bob bobgroup rwxrwxrwx 51</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing deleting a cache pool</description>
+ <test-commands>
+ <cache-admin-command>-addPool foo</cache-admin-command>
+ <cache-admin-command>-removePool foo</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Successfully removed cache pool foo.</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing listing all cache pools</description>
+ <test-commands>
+ <cache-admin-command>-addPool foo -owner bob -group bob -mode 0664</cache-admin-command>
+ <cache-admin-command>-addPool bar -owner alice -group alicegroup -mode 0755</cache-admin-command>
+ <cache-admin-command>-listPools</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool foo</cache-admin-command>
+ <cache-admin-command>-removePool bar</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 2 results.</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>bar alice alicegroup rwxr-xr-x 100 </expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>foo bob bob rw-rw-r-- 100 </expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing listing a single cache pool</description>
+ <test-commands>
+ <cache-admin-command>-addPool foo -owner bob -group bob -mode 0664</cache-admin-command>
+ <cache-admin-command>-addPool bar -owner alice -group alicegroup -mode 0755</cache-admin-command>
+ <cache-admin-command>-listPools foo</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool foo</cache-admin-command>
+ <cache-admin-command>-removePool bar</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 1 result.</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>foo bob bob rw-rw-r-- 100 </expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing creating cache paths</description>
+ <test-commands>
+ <cache-admin-command>-addPool pool1</cache-admin-command>
+ <cache-admin-command>-addPath -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addPath -path /bar -pool pool1</cache-admin-command>
+ <cache-admin-command>-listPaths -pool pool1</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool pool1</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 2 entries</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>1 pool1 /foo</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>2 pool1 /bar</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing removing cache paths</description>
+ <test-commands>
+ <cache-admin-command>-addPool pool1</cache-admin-command>
+ <cache-admin-command>-addPath -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addPath -path /bar -pool pool1</cache-admin-command>
+ <cache-admin-command>-removePool pool1</cache-admin-command>
+ <cache-admin-command>-listPaths -pool pool1</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 0 entries</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ </tests>
+</configuration>
Propchange: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml
------------------------------------------------------------------------------
svn:eol-style = native
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml?rev=1529334&r1=1529333&r2=1529334&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml Fri Oct 4 22:28:23 2013
@@ -16521,70 +16521,5 @@
</comparator>
</comparators>
</test>
-
- <test> <!--Tested -->
- <description>Testing listing no cache pools</description>
- <test-commands>
- <dfs-admin-command>-fs NAMENODE -listCachePools</dfs-admin-command>
- </test-commands>
- <cleanup-commands>
- </cleanup-commands>
- <comparators>
- <comparator>
- <type>SubstringComparator</type>
- <expected-output>Found 0 results.</expected-output>
- </comparator>
- </comparators>
- </test>
-
- <test> <!--Tested -->
- <description>Testing adding a cache pool</description>
- <test-commands>
- <dfs-admin-command>-fs NAMENODE -addCachePool foo</dfs-admin-command>
- </test-commands>
- <cleanup-commands>
- <dfs-admin-command>-fs NAMENODE -removeCachePool foo</dfs-admin-command>
- </cleanup-commands>
- <comparators>
- <comparator>
- <type>SubstringComparator</type>
- <expected-output>Successfully added cache pool foo.</expected-output>
- </comparator>
- </comparators>
- </test>
-
- <test> <!--Tested -->
- <description>Testing deleting a cache pool</description>
- <test-commands>
- <dfs-admin-command>-fs NAMENODE -addCachePool foo</dfs-admin-command>
- <dfs-admin-command>-fs NAMENODE -removeCachePool foo</dfs-admin-command>
- </test-commands>
- <cleanup-commands>
- </cleanup-commands>
- <comparators>
- <comparator>
- <type>SubstringComparator</type>
- <expected-output>Successfully removed cache pool foo.</expected-output>
- </comparator>
- </comparators>
- </test>
-
- <test> <!--Tested -->
- <description>Testing listing a cache pool</description>
- <test-commands>
- <dfs-admin-command>-fs NAMENODE -addCachePool foo -owner bob -group bob -mode 0664</dfs-admin-command>
- <dfs-admin-command>-fs NAMENODE -listCachePools foo</dfs-admin-command>
- </test-commands>
- <cleanup-commands>
- <dfs-admin-command>-fs NAMENODE -removeCachePool foo</dfs-admin-command>
- </cleanup-commands>
- <comparators>
- <comparator>
- <type>SubstringComparator</type>
- <expected-output>bob bob rw-rw-r-- 100</expected-output>
- </comparator>
- </comparators>
- </test>
-
</tests>
</configuration>