Posted to hdfs-commits@hadoop.apache.org by cm...@apache.org on 2013/09/12 05:55:11 UTC

svn commit: r1522272 [2/2] - in /hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/bin/ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/protocol/ src/main/java/org/apache/hadoop/hdfs/protocolPB/ sr...

Added: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java?rev=1522272&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java (added)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java Thu Sep 12 03:55:10 2013
@@ -0,0 +1,333 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.tools.TableListing.Justification;
+import org.apache.hadoop.util.Fallible;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * This class implements command-line operations on the HDFS Cache.
+ */
+@InterfaceAudience.Private
+public class CacheAdmin {
+  private static Configuration conf = new Configuration();
+
+  private static DistributedFileSystem getDFS() throws IOException {
+    FileSystem fs = FileSystem.get(conf);
+    if (!(fs instanceof DistributedFileSystem)) {
+      throw new IllegalArgumentException("FileSystem " + fs.getUri() +
+          " is not an HDFS file system");
+    }
+    return (DistributedFileSystem)fs;
+  }
+
+  interface Command {
+    String getName();
+    String getShortUsage();
+    String getLongUsage();
+    int run(List<String> args) throws IOException;
+  }
+
+  private static class AddPathBasedCacheDirectiveCommand implements Command {
+    @Override
+    public String getName() {
+      return "-addPath";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-addPath -path <path> -pool <pool-name>]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "Adds a new PathBasedCache directive.\n" +
+        "<path>  The new path to cache.\n" + 
+        "        Paths may be either directories or files.\n" +
+        "<pool-name> The pool which this directive will reside in.\n" + 
+        "        You must have write permission on the cache pool in order\n" +
+        "        to add new entries to it.\n";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      String path = StringUtils.popOptionWithArgument("-path", args);
+      if (path == null) {
+        System.err.println("You must specify a path with -path.");
+        return 1;
+      }
+      String poolName = StringUtils.popOptionWithArgument("-pool", args);
+      if (poolName == null) {
+        System.err.println("You must specify a pool name with -pool.");
+        return 1;
+      }
+      if (!args.isEmpty()) {
+        System.err.println("Can't understand argument: " + args.get(0));
+        return 1;
+      }
+        
+      DistributedFileSystem dfs = getDFS();
+      List<PathBasedCacheDirective> directives =
+          new LinkedList<PathBasedCacheDirective>();
+      PathBasedCacheDirective directive =
+          new PathBasedCacheDirective(path, poolName);
+      directives.add(directive);
+      List<Fallible<PathBasedCacheEntry>> results =
+          dfs.addPathBasedCacheDirective(directives);
+      try {
+        PathBasedCacheEntry entry = results.get(0).get();
+        System.out.println("Added PathBasedCache entry " + entry.getEntryId());
+        return 0;
+      } catch (IOException e) {
+        System.err.println("Error adding cache directive " + directive + ": " +
+          e.getMessage());
+        return 1;
+      }
+    }
+  }
+
+  private static class RemovePathBasedCacheDirectiveCommand implements Command {
+    @Override
+    public String getName() {
+      return "-removePath";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-removePath <id>]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "Remove a cache directive.\n" +
+        "<id>    The id of the cache directive to remove.\n" + 
+        "        You must have write permission on the pool where the\n" +
+        "        directive resides in order to remove it.  To see a list\n" +
+        "        of PathBasedCache directive IDs, use the -list command.\n";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      String idString = StringUtils.popFirstNonOption(args);
+      if (idString == null) {
+        System.err.println("You must specify a directive ID to remove.");
+        return 1;
+      }
+      long id;
+      try {
+        id = Long.parseLong(idString);
+      } catch (NumberFormatException e) {
+        System.err.println("Invalid directive ID " + idString +
+            ": expected a number.");
+        return 1;
+      }
+      if (id <= 0) {
+        System.err.println("Invalid directive ID " + id + ": ids must " +
+            "be greater than 0.");
+        return 1;
+      }
+      if (!args.isEmpty()) {
+        System.err.println("Can't understand argument: " + args.get(0));
+        return 1;
+      }
+      DistributedFileSystem dfs = getDFS();
+      List<Long> ids = new LinkedList<Long>();
+      ids.add(id);
+      List<Fallible<Long>> results = dfs.removePathBasedCacheEntries(ids);
+      try {
+        Long resultId = results.get(0).get();
+        System.out.println("Removed PathBasedCache entry " + resultId);
+        return 0;
+      } catch (IOException e) {
+        System.err.println("Error removing cache directive " + id + ": " +
+          e.getMessage());
+        return 1;
+      }
+    }
+  }
+
+  private static class ListPathBasedCacheDirectiveCommand implements Command {
+    @Override
+    public String getName() {
+      return "-listPaths";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-listPaths [-path <path>] [-pool <pool-name>]]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "List PathBasedCache directives.\n" +
+        "<path> If a -path argument is given, we will list only\n" +
+        "        PathBasedCache entries with this path.\n" +
+        "        Note that if there is a PathBasedCache directive for <path>\n" +
+        "        in a cache pool that we don't have read access for, it\n" + 
+        "        not be listed.  If there are unreadable cache pools, a\n" +
+        "        message will be printed.\n" +
+        "        may be incomplete.\n" +
+        "<pool-name> If a -pool argument is given, we will list only path\n" +
+        "        cache entries in that pool.\n";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      String pathFilter = StringUtils.popOptionWithArgument("-path", args);
+      String poolFilter = StringUtils.popOptionWithArgument("-pool", args);
+      if (!args.isEmpty()) {
+        System.err.println("Can't understand argument: " + args.get(0));
+        return 1;
+      }
+      TableListing tableListing = new TableListing.Builder().
+          addField("ID", Justification.RIGHT).
+          addField("POOL", Justification.LEFT).
+          addField("PATH", Justification.LEFT).
+          build();
+      DistributedFileSystem dfs = getDFS();
+      RemoteIterator<PathBasedCacheEntry> iter =
+          dfs.listPathBasedCacheEntries(poolFilter, pathFilter);
+      int numEntries = 0;
+      while (iter.hasNext()) {
+        PathBasedCacheEntry entry = iter.next();
+        String row[] = new String[] {
+            "" + entry.getEntryId(),
+            entry.getDirective().getPool(),
+            entry.getDirective().getPath(),
+        };
+        tableListing.addRow(row);
+        numEntries++;
+      }
+      System.out.print(String.format("Found %d entr%s\n",
+          numEntries, numEntries == 1 ? "y" : "ies"));
+      if (numEntries > 0) {
+        System.out.print(tableListing.build());
+      }
+      return 0;
+    }
+  }
+
+  private static class HelpCommand implements Command {
+    @Override
+    public String getName() {
+      return "-help";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[-help <command-name>]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      return getShortUsage() +
+        "Get detailed help about a command.\n" +
+        "<command-name> The command to get detailed help for.  If no " +
+        "        command-name is specified, we will print detailed help " +
+        "        about all commands";
+    }
+
+    @Override
+    public int run(List<String> args) throws IOException {
+      if (args.size() == 0) {
+        for (Command command : COMMANDS) {
+          System.err.println(command.getLongUsage());
+        }
+        return 0;
+      }
+      if (args.size() != 1) {
+        System.err.println("You must give exactly one argument to -help.");
+        return 1;
+      }
+      String commandName = args.get(0);
+      // Accept the command name with or without its leading dash.
+      commandName = commandName.replaceAll("^[-]*", "");
+      Command command = determineCommand("-" + commandName);
+      if (command == null) {
+        System.err.print("Sorry, I don't know the command '" +
+          commandName + "'.\n");
+        System.err.print("Valid command names are:\n");
+        String separator = "";
+        for (Command c : COMMANDS) {
+          System.err.print(separator + c.getName());
+          separator = ", ";
+        }
+        return 1;
+      }
+      System.err.print(command.getLongUsage());
+      return 0;
+    }
+  }
+
+  private static final Command[] COMMANDS = {
+    new AddPathBasedCacheDirectiveCommand(),
+    new RemovePathBasedCacheDirectiveCommand(),
+    new ListPathBasedCacheDirectiveCommand(),
+    new HelpCommand(),
+  };
+
+  private static void printUsage(boolean longUsage) {
+    System.err.println(
+        "Usage: bin/hdfs cacheadmin [COMMAND]");
+    for (Command command : COMMANDS) {
+      if (longUsage) {
+        System.err.print(command.getLongUsage());
+      } else {
+        System.err.print("          " + command.getShortUsage());
+      }
+    }
+    System.err.println();
+  }
+
+  private static Command determineCommand(String commandName) {
+    for (int i = 0; i < COMMANDS.length; i++) {
+      if (COMMANDS[i].getName().equals(commandName)) {
+        return COMMANDS[i];
+      }
+    }
+    return null;
+  }
+
+  public static void main(String[] argsArray) throws IOException {
+    if (argsArray.length == 0) {
+      printUsage(false);
+      System.exit(1);
+    }
+    Command command = determineCommand(argsArray[0]);
+    if (command == null) {
+      System.err.println("Can't understand command '" + argsArray[0] + "'");
+      if (!argsArray[0].startsWith("-")) {
+        System.err.println("Command names must start with dashes.");
+      }
+      printUsage(false);
+      System.exit(1);
+    }
+    List<String> args = new LinkedList<String>();
+    for (int j = 1; j < argsArray.length; j++) {
+      args.add(argsArray[j]);
+    }
+    System.exit(command.run(args));
+  }
+}

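A quick sketch of driving the new tool end to end (the path and pool name here are illustrative; the pool must already exist and the caller needs write permission on it):

    // Equivalent to: hdfs cacheadmin -addPath -path /data/logs -pool pool1
    // Note: main() calls System.exit(), so each command runs in its own JVM.
    CacheAdmin.main(new String[] {
        "-addPath", "-path", "/data/logs", "-pool", "pool1" });
    // Listing and removal follow the same pattern:
    //   hdfs cacheadmin -listPaths -pool pool1
    //   hdfs cacheadmin -removePath <id>
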
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java Thu Sep 12 03:55:10 2013
@@ -57,6 +57,7 @@ import org.apache.hadoop.hdfs.protocol.H
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.TransferFsImage;
+import org.apache.hadoop.hdfs.tools.TableListing.Justification;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
@@ -634,15 +635,6 @@ public class DFSAdmin extends FsShell {
   final private static String LIST_CACHE_POOLS_USAGE =
       "-listCachePools] [-verbose] [name]";
 
-  private void listCachePool(CachePoolInfo info) {
-    System.out.print(String.format("%s\n", info.getPoolName()));
-    System.out.print(String.format("owner:\t%s\n", info.getOwnerName()));
-    System.out.print(String.format("group:\t%s\n", info.getGroupName()));
-    System.out.print(String.format("mode:\t%s\n", info.getMode()));
-    System.out.print(String.format("weight:\t%d\n", info.getWeight()));
-    System.out.print("\n");
-  }
-
   public int listCachePools(String argsArray[], int idx) throws IOException {
     List<String> args = new LinkedList<String>();
     for (int i = idx; i < argsArray.length; i++) {
@@ -655,39 +647,44 @@ public class DFSAdmin extends FsShell {
       System.err.println("usage is " + LIST_CACHE_POOLS_USAGE);
       return 1;
     }
-    boolean gotResults = false;
     DistributedFileSystem dfs = getDFS();
+    TableListing listing = new TableListing.Builder().
+        addField("NAME", Justification.LEFT).
+        addField("OWNER", Justification.LEFT).
+        addField("GROUP", Justification.LEFT).
+        addField("MODE", Justification.LEFT).
+        addField("WEIGHT", Justification.RIGHT).
+        build();
+    int numResults = 0;
     try {
       RemoteIterator<CachePoolInfo> iter = dfs.listCachePools();
-      if (name != null) {
-        while (iter.hasNext()) {
-          CachePoolInfo info = iter.next();
-          if (info.getPoolName().equals(name)) {
-            listCachePool(info);
-            gotResults = true;
-            return 0;
+      while (iter.hasNext()) {
+        CachePoolInfo info = iter.next();
+        if (name == null || info.getPoolName().equals(name)) {
+          listing.addRow(new String[] {
+              info.getPoolName(),
+              info.getOwnerName(),
+              info.getGroupName(),
+              info.getMode().toString(),
+              info.getWeight().toString(),
+          });
+          ++numResults;
+          if (name != null) {
+            break;
           }
         }
-      } else {
-        while (iter.hasNext()) {
-          listCachePool(iter.next());
-          gotResults = true;
-        }
       }
     } catch (IOException e) {
       throw new RemoteException(e.getClass().getName(), e.getMessage());
     }
-    int ret = 0;
-    if (!gotResults) {
-      if (name != null) {
-        System.out.println("No cache pool named " + name + " found.");
-        ret = 1;
-      } else {
-        System.out.println("No cache pools found.");
-        ret = 1;
-      }
-    }
-    return ret;
+    System.out.print(String.format("Found %d result%s.\n", numResults,
+        (numResults == 1 ? "" : "s")));
+    if (numResults > 0) { 
+      System.out.print(listing.build());
+    }
+    // If there are no results, we return 1 (failure exit code);
+    // otherwise we return 0 (success exit code).
+    return (numResults == 0) ? 1 : 0;
   }
 
   public int rollEdits() throws IOException {

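For reference, the client-side loop behind -listCachePools reduces to roughly the following sketch (assuming a default Configuration that points at an HDFS filesystem; error handling omitted):

    DistributedFileSystem dfs =
        (DistributedFileSystem) FileSystem.get(new Configuration());
    RemoteIterator<CachePoolInfo> iter = dfs.listCachePools();
    while (iter.hasNext()) {
      CachePoolInfo info = iter.next();
      // Each pool reports a name, owner, group, mode, and weight.
      System.out.println(info.getPoolName() + "\t" + info.getOwnerName() +
          "\t" + info.getGroupName() + "\t" + info.getMode() +
          "\t" + info.getWeight());
    }
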
Added: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java?rev=1522272&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java (added)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java Thu Sep 12 03:55:10 2013
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools;
+
+import java.util.LinkedList;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * This class implements a "table listing" with column headers.
+ */
+@InterfaceAudience.Private
+public class TableListing {
+  public enum Justification {
+    LEFT,
+    RIGHT;
+  }
+
+  private static class Column {
+    private final LinkedList<String> rows;
+    private final Justification justification;
+    private int maxLength;
+
+    Column(String title, Justification justification) {
+      this.rows = new LinkedList<String>();
+      this.justification = justification;
+      this.maxLength = 0;
+      addRow(title);
+    }
+
+    private void addRow(String val) {
+      if ((val.length() + 1) > maxLength) {
+        maxLength = val.length() + 1;
+      }
+      rows.add(val);
+    }
+
+    String getRow(int i) {
+      String raw = rows.get(i);
+      int paddingLength = maxLength - raw.length();
+      String padding = (paddingLength <= 0) ? "" :
+        StringUtils.repeat(" ", paddingLength);
+      if (justification == Justification.LEFT) {
+        return raw + padding;
+      } else {
+        return padding + raw;
+      }
+    }
+  }
+
+  public static class Builder {
+    private final LinkedList<Column> columns = new LinkedList<Column>();
+
+    /**
+     * Create a new Builder.
+     */
+    public Builder() {
+    }
+
+    /**
+     * Add a new field to the Table under construction.
+     *
+     * @param title          Field title.
+     * @param justification  Whether the field is left or right justified.
+     * @return               this.
+     */
+    public Builder addField(String title, Justification justification) {
+      columns.add(new Column(title, justification));
+      return this;
+    }
+
+    /**
+     * Create a new TableListing.
+     */
+    public TableListing build() {
+      return new TableListing(columns.toArray(new Column[0]));
+    }
+  }
+
+  private final Column columns[];
+
+  private int numRows;
+
+  TableListing(Column columns[]) {
+    this.columns = columns;
+    this.numRows = 0;
+  }
+
+  /**
+   * Add a new row.
+   *
+   * @param row    The row of values to add, one per column.
+   */
+  public void addRow(String row[]) {
+    if (row.length != columns.length) {
+      throw new RuntimeException("trying to add a row with " + row.length +
+            " columns, but we have " + columns.length + " columns.");
+    }
+    for (int i = 0; i < columns.length; i++) {
+      columns[i].addRow(row[i]);
+    }
+    numRows++;
+  }
+
+  /**
+   * Convert the table to a string.
+   */
+  public String build() {
+    StringBuilder builder = new StringBuilder();
+    // Row 0 holds the column titles; the numRows data rows follow.
+    for (int i = 0; i < numRows + 1; i++) {
+      String prefix = "";
+      for (int j = 0; j < columns.length; j++) {
+        builder.append(prefix);
+        prefix = " ";
+        builder.append(columns[j].getRow(i));
+      }
+      builder.append("\n");
+    }
+    return builder.toString();
+  }
+}

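A short usage sketch for the class above, mirroring how CacheAdmin and DFSAdmin build their listings (column widths grow to fit the longest value, so exact spacing depends on the data):

    TableListing listing = new TableListing.Builder().
        addField("ID", TableListing.Justification.RIGHT).
        addField("POOL", TableListing.Justification.LEFT).
        build();
    listing.addRow(new String[] { "1", "pool1" });
    listing.addRow(new String[] { "42", "pool2" });
    // build() renders the header row followed by the data rows,
    // right-justifying ID and left-justifying POOL.
    System.out.print(listing.build());
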
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto Thu Sep 12 03:55:10 2013
@@ -363,54 +363,56 @@ message IsFileClosedResponseProto {
   required bool result = 1;
 }
 
-message PathCacheDirectiveProto {
+message PathBasedCacheDirectiveProto {
   required string path = 1;
   required string pool = 2;
 }
 
-message AddPathCacheDirectivesRequestProto {
-  repeated PathCacheDirectiveProto elements = 1;
+message AddPathBasedCacheDirectivesRequestProto {
+  repeated PathBasedCacheDirectiveProto elements = 1;
 }
 
-message AddPathCacheDirectivesResponseProto {
+message AddPathBasedCacheDirectivesResponseProto {
   repeated int64 results = 1 [packed=true];
 }
 
-enum AddPathCacheDirectiveErrorProto {
-  EMPTY_PATH_ERROR = -1;
-  INVALID_PATH_NAME_ERROR = -2;
-  INVALID_POOL_NAME_ERROR = -3;
-  UNEXPECTED_ADD_ERROR = -4;
+enum AddPathBasedCacheDirectiveErrorProto {
+  UNEXPECTED_ADD_ERROR = -1;
+  EMPTY_PATH_ERROR = -2;
+  INVALID_PATH_NAME_ERROR = -3;
+  INVALID_POOL_NAME_ERROR = -4;
+  ADD_PERMISSION_DENIED_ERROR = -5;
 }
 
-message RemovePathCacheEntriesRequestProto {
+message RemovePathBasedCacheEntriesRequestProto {
   repeated int64 elements = 1 [packed=true];
 }
 
-message RemovePathCacheEntriesResponseProto {
+message RemovePathBasedCacheEntriesResponseProto {
   repeated int64 results = 1 [packed=true];
 }
 
-enum RemovePathCacheEntryErrorProto {
-  INVALID_CACHED_PATH_ID_ERROR = -1;
-  NO_SUCH_CACHED_PATH_ID_ERROR = -2;
-  REMOVE_PERMISSION_DENIED_ERROR = -3;
-  UNEXPECTED_REMOVE_ERROR = -4;
+enum RemovePathBasedCacheEntryErrorProto {
+  UNEXPECTED_REMOVE_ERROR = -1;
+  INVALID_CACHED_PATH_ID_ERROR = -2;
+  NO_SUCH_CACHED_PATH_ID_ERROR = -3;
+  REMOVE_PERMISSION_DENIED_ERROR = -4;
 }
 
-message ListPathCacheEntriesRequestProto {
+message ListPathBasedCacheEntriesRequestProto {
   required int64 prevId = 1;
-  required string pool = 2;
+  optional string pool = 2;
+  optional string path = 3;
 }
 
-message ListPathCacheEntriesElementProto {
+message ListPathBasedCacheEntriesElementProto {
   required int64 id = 1;
-  required string path = 2;
-  required string pool = 3;
+  required string pool = 2;
+  required string path = 3;
 }
 
-message ListPathCacheEntriesResponseProto {
-  repeated ListPathCacheEntriesElementProto elements = 1;
+message ListPathBasedCacheEntriesResponseProto {
+  repeated ListPathBasedCacheEntriesElementProto elements = 1;
   required bool hasMore = 2;
 }
 
@@ -449,7 +451,7 @@ message ListCachePoolsRequestProto {
 
 message ListCachePoolsResponseProto {
   repeated ListCachePoolsResponseElementProto elements = 1;
-  optional bool hasMore = 2;
+  required bool hasMore = 2;
 }
 
 message ListCachePoolsResponseElementProto {
@@ -641,12 +643,12 @@ service ClientNamenodeProtocol {
       returns(ListCorruptFileBlocksResponseProto);
   rpc metaSave(MetaSaveRequestProto) returns(MetaSaveResponseProto);
   rpc getFileInfo(GetFileInfoRequestProto) returns(GetFileInfoResponseProto);
-  rpc addPathCacheDirectives(AddPathCacheDirectivesRequestProto)
-      returns (AddPathCacheDirectivesResponseProto);
-  rpc removePathCacheEntries(RemovePathCacheEntriesRequestProto)
-      returns (RemovePathCacheEntriesResponseProto);
-  rpc listPathCacheEntries(ListPathCacheEntriesRequestProto)
-      returns (ListPathCacheEntriesResponseProto);
+  rpc addPathBasedCacheDirectives(AddPathBasedCacheDirectivesRequestProto)
+      returns (AddPathBasedCacheDirectivesResponseProto);
+  rpc removePathBasedCacheEntries(RemovePathBasedCacheEntriesRequestProto)
+      returns (RemovePathBasedCacheEntriesResponseProto);
+  rpc listPathBasedCacheEntries(ListPathBasedCacheEntriesRequestProto)
+      returns (ListPathBasedCacheEntriesResponseProto);
   rpc addCachePool(AddCachePoolRequestProto)
       returns(AddCachePoolResponseProto);
   rpc modifyCachePool(ModifyCachePoolRequestProto)

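As a rough illustration of the renamed messages, building an add request through the standard protobuf-java codegen would look like this (the generated outer class ClientNamenodeProtocolProtos and its nested types are assumed; field names follow the .proto above):

    AddPathBasedCacheDirectivesRequestProto request =
        AddPathBasedCacheDirectivesRequestProto.newBuilder()
            .addElements(PathBasedCacheDirectiveProto.newBuilder()
                .setPath("/data/logs")  // required string path = 1
                .setPool("pool1"))      // required string pool = 2
            .build();
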
Copied: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java (from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathCacheRequests.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java?p2=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java&p1=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathCacheRequests.java&r1=1522246&r2=1522272&rev=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathCacheRequests.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java Thu Sep 12 03:55:10 2013
@@ -34,23 +34,23 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.PoolWritePermissionDeniedError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Fallible;
 import org.junit.Test;
 
-public class TestPathCacheRequests {
-  static final Log LOG = LogFactory.getLog(TestPathCacheRequests.class);
+public class TestPathBasedCacheRequests {
+  static final Log LOG = LogFactory.getLog(TestPathBasedCacheRequests.class);
 
   private static final UserGroupInformation unprivilegedUser =
       UserGroupInformation.createRemoteUser("unprivilegedUser");
@@ -101,11 +101,16 @@ public class TestPathCacheRequests {
     proto.addCachePool(new CachePoolInfo("pool1").
         setOwnerName("abc").setGroupName("123").
         setMode(new FsPermission((short)0755)).setWeight(150));
-    proto.modifyCachePool(new CachePoolInfo("pool1").
-        setOwnerName("def").setGroupName("456"));
     RemoteIterator<CachePoolInfo> iter = proto.listCachePools("");
     CachePoolInfo info = iter.next();
     assertEquals("pool1", info.getPoolName());
+    assertEquals("abc", info.getOwnerName());
+    assertEquals("123", info.getGroupName());
+    proto.modifyCachePool(new CachePoolInfo("pool1").
+        setOwnerName("def").setGroupName("456"));
+    iter = proto.listCachePools("");
+    info = iter.next();
+    assertEquals("pool1", info.getPoolName());
     assertEquals("def", info.getOwnerName());
     assertEquals("456", info.getGroupName());
     assertEquals(new FsPermission((short)0755), info.getMode());
@@ -127,16 +132,16 @@ public class TestPathCacheRequests {
   }
 
   private static void validateListAll(
-      RemoteIterator<PathCacheEntry> iter,
+      RemoteIterator<PathBasedCacheEntry> iter,
       long id0, long id1, long id2) throws Exception {
-    Assert.assertEquals(new PathCacheEntry(id0,
-        new PathCacheDirective("/alpha", "pool1")),
+    Assert.assertEquals(new PathBasedCacheEntry(id0,
+        new PathBasedCacheDirective("/alpha", "pool1")),
         iter.next());
-    Assert.assertEquals(new PathCacheEntry(id1,
-        new PathCacheDirective("/beta", "pool2")),
+    Assert.assertEquals(new PathBasedCacheEntry(id1,
+        new PathBasedCacheDirective("/beta", "pool2")),
         iter.next());
-    Assert.assertEquals(new PathCacheEntry(id2,
-        new PathCacheDirective("/gamma", "pool1")),
+    Assert.assertEquals(new PathBasedCacheEntry(id2,
+        new PathBasedCacheDirective("/gamma", "pool1")),
         iter.next());
     Assert.assertFalse(iter.hasNext());
   }
@@ -159,18 +164,19 @@ public class TestPathCacheRequests {
       proto.addCachePool(new CachePoolInfo("pool4").
           setMode(new FsPermission((short)0)));
 
-      List<Fallible<PathCacheEntry>> addResults1 = 
+      List<Fallible<PathBasedCacheEntry>> addResults1 = 
         unprivilegedUser.doAs(new PrivilegedExceptionAction<
-            List<Fallible<PathCacheEntry>>>() {
+            List<Fallible<PathBasedCacheEntry>>>() {
           @Override
-          public List<Fallible<PathCacheEntry>> run() throws IOException {
-            return proto.addPathCacheDirectives(Arrays.asList(
-              new PathCacheDirective[] {
-                new PathCacheDirective("/alpha", "pool1"),
-                new PathCacheDirective("/beta", "pool2"),
-                new PathCacheDirective("", "pool3"),
-                new PathCacheDirective("/zeta", "nonexistent_pool"),
-                new PathCacheDirective("/zeta", "pool4")
+          public List<Fallible<PathBasedCacheEntry>> run() throws IOException {
+            return proto.addPathBasedCacheDirectives(Arrays.asList(
+              new PathBasedCacheDirective[] {
+                new PathBasedCacheDirective("/alpha", "pool1"),
+                new PathBasedCacheDirective("/beta", "pool2"),
+                new PathBasedCacheDirective("", "pool3"),
+                new PathBasedCacheDirective("/zeta", "nonexistent_pool"),
+                new PathBasedCacheDirective("/zeta", "pool4"),
+                new PathBasedCacheDirective("//illegal/path/", "pool1")
               }));
             }
           });
@@ -197,28 +203,36 @@ public class TestPathCacheRequests {
         Assert.assertTrue(ioe.getCause()
             instanceof PoolWritePermissionDeniedError);
       }
+      try {
+        addResults1.get(5).get();
+        Assert.fail("expected an error when adding a malformed path " +
+            "to the cache directives.");
+      } catch (IOException ioe) {
+        // The expected cause here is an invalid-path error; the exact
+        // exception type is not asserted.
+      }
 
-      List<Fallible<PathCacheEntry>> addResults2 = 
-          proto.addPathCacheDirectives(Arrays.asList(
-            new PathCacheDirective[] {
-        new PathCacheDirective("/alpha", "pool1"),
-        new PathCacheDirective("/theta", ""),
-        new PathCacheDirective("bogus", "pool1"),
-        new PathCacheDirective("/gamma", "pool1")
+      List<Fallible<PathBasedCacheEntry>> addResults2 = 
+          proto.addPathBasedCacheDirectives(Arrays.asList(
+            new PathBasedCacheDirective[] {
+        new PathBasedCacheDirective("/alpha", "pool1"),
+        new PathBasedCacheDirective("/theta", ""),
+        new PathBasedCacheDirective("bogus", "pool1"),
+        new PathBasedCacheDirective("/gamma", "pool1")
       }));
       long id = addResults2.get(0).get().getEntryId();
       Assert.assertEquals("expected to get back the same ID as last time " +
-          "when re-adding an existing path cache directive.", ids1[0], id);
+          "when re-adding an existing PathBasedCache directive.", ids1[0], id);
       try {
         addResults2.get(1).get();
-        Assert.fail("expected an error when adding a path cache " +
+        Assert.fail("expected an error when adding a PathBasedCache " +
             "directive with an empty pool name.");
       } catch (IOException ioe) {
         Assert.assertTrue(ioe.getCause() instanceof InvalidPoolNameError);
       }
       try {
         addResults2.get(2).get();
-        Assert.fail("expected an error when adding a path cache " +
+        Assert.fail("expected an error when adding a PathBasedCache " +
             "directive with a non-absolute path name.");
       } catch (IOException ioe) {
         Assert.assertTrue(ioe.getCause() instanceof InvalidPathNameError);
@@ -226,20 +240,20 @@ public class TestPathCacheRequests {
       long ids2[] = new long[1];
       ids2[0] = addResults2.get(3).get().getEntryId();
 
-      RemoteIterator<PathCacheEntry> iter =
-          proto.listPathCacheEntries(0, "");
+      RemoteIterator<PathBasedCacheEntry> iter =
+          proto.listPathBasedCacheEntries(0, null, null);
       validateListAll(iter, ids1[0], ids1[1], ids2[0]);
-      iter = proto.listPathCacheEntries(0, "");
+      iter = proto.listPathBasedCacheEntries(0, null, null);
       validateListAll(iter, ids1[0], ids1[1], ids2[0]);
-      iter = proto.listPathCacheEntries(0, "pool3");
+      iter = proto.listPathBasedCacheEntries(0, "pool3", null);
       Assert.assertFalse(iter.hasNext());
-      iter = proto.listPathCacheEntries(0, "pool2");
+      iter = proto.listPathBasedCacheEntries(0, "pool2", null);
       Assert.assertEquals(addResults1.get(1).get(),
           iter.next());
       Assert.assertFalse(iter.hasNext());
 
       List<Fallible<Long>> removeResults1 = 
-          proto.removePathCacheEntries(Arrays.asList(
+          proto.removePathBasedCacheEntries(Arrays.asList(
             new Long[] { ids1[1], -42L, 999999L }));
       Assert.assertEquals(Long.valueOf(ids1[1]),
           removeResults1.get(0).get());
@@ -255,7 +269,7 @@ public class TestPathCacheRequests {
       } catch (IOException ioe) {
         Assert.assertTrue(ioe.getCause() instanceof NoSuchIdException);
       }
-      iter = proto.listPathCacheEntries(0, "pool2");
+      iter = proto.listPathBasedCacheEntries(0, "pool2", null);
       Assert.assertFalse(iter.hasNext());
     } finally {
       if (cluster != null) { cluster.shutdown(); }

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml Thu Sep 12 03:55:10 2013
@@ -16385,7 +16385,7 @@
       <comparators>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output>No cache pools found.</expected-output>
+          <expected-output>Found 0 results.</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -16434,7 +16434,7 @@
       <comparators>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output>foo</expected-output>
+          <expected-output>bob    bob    rw-rw-r--      100</expected-output>
         </comparator>
       </comparators>
     </test>