Posted to hdfs-commits@hadoop.apache.org by cm...@apache.org on 2013/09/06 20:52:51 UTC

svn commit: r1520665 [1/2] - in /hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/protocol/ src/main/java/org/apache/hadoop/hdfs/protocolPB/ src/main/java/or...

Author: cmccabe
Date: Fri Sep  6 18:52:50 2013
New Revision: 1520665

URL: http://svn.apache.org/r1520665
Log:
HDFS-5163. Miscellaneous cache pool RPC fixes (Contributed by Colin Patrick McCabe)

Modified:
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathCacheRequests.java

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt Fri Sep  6 18:52:50 2013
@@ -24,6 +24,9 @@ HDFS-4949 (Unreleased)
     HDFS-5121. Add RPCs for creating and manipulating cache pools.
     (Contributed by Colin Patrick McCabe)
 
+    HDFS-5163. Miscellaneous cache pool RPC fixes.  (Contributed by Colin
+    Patrick McCabe)
+
 
   OPTIMIZATIONS
 

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Fri Sep  6 18:52:50 2013
@@ -195,6 +195,13 @@ public class DFSConfigKeys extends Commo
   public static final String  DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_KEY = "dfs.datanode.socket.reuse.keepalive";
   public static final int     DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_DEFAULT = 1000;
   
+  public static final String  DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES =
+      "dfs.namenode.list.cache.pools.num.responses";
+  public static final int     DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT = 100;
+  public static final String  DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES =
+      "dfs.namenode.list.cache.directives.num.responses";
+  public static final int     DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT = 100;
+
   // Whether to enable datanode's stale state detection and usage for reads
   public static final String DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY = "dfs.namenode.avoid.read.stale.datanode";
   public static final boolean DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_DEFAULT = false;
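
[Editor's note: the two new keys bound how many cache pools and cache directives the NameNode returns per listing RPC, which is what lets the client-side iterators later in this patch page through results without a caller-supplied maxRepliesPerRequest. A minimal sketch of reading them, mirroring what CacheManager does further down; the local variable names are illustrative:]

  Configuration conf = new Configuration();
  // Both limits default to 100 when the keys are unset.
  int maxPools = conf.getInt(
      DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES,
      DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT);
  int maxDirectives = conf.getInt(
      DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES,
      DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT);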

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java Fri Sep  6 18:52:50 2013
@@ -56,12 +56,12 @@ public abstract class AddPathCacheDirect
     }
   }
 
-  public static class InvalidPoolError
+  public static class InvalidPoolNameError
       extends AddPathCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
-    public InvalidPoolError(PathCacheDirective directive) {
-      super("invalid pool id " + directive.getPoolId(), directive);
+    public InvalidPoolNameError(PathCacheDirective directive) {
+      super("invalid pool name '" + directive.getPool() + "'", directive);
     }
   }
 
@@ -70,7 +70,7 @@ public abstract class AddPathCacheDirect
     private static final long serialVersionUID = 1L;
 
     public PoolWritePermissionDeniedError(PathCacheDirective directive) {
-      super("write permission denied for pool id " + directive.getPoolId(),
+      super("write permission denied for pool '" + directive.getPool() + "'",
             directive);
     }
   }
@@ -82,9 +82,7 @@ public abstract class AddPathCacheDirect
     public UnexpectedAddPathCacheDirectiveException(
         PathCacheDirective directive) {
       super("encountered an unexpected error when trying to " +
-          "add path cache directive to pool id " + directive.getPoolId() +
-          " " + directive,
-          directive);
+          "add path cache directive " + directive, directive);
     }
   }
 };
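
[Editor's note: the rename from InvalidPoolError to InvalidPoolNameError tracks this patch's switch from numeric pool IDs to pool names. A hedged sketch of how a caller could react to it when validating a directive (see PathCacheDirective.validate() below):]

  try {
    directive.validate();
  } catch (AddPathCacheDirectiveException.InvalidPoolNameError e) {
    // Raised when the directive names a pool with an empty string.
    System.err.println("invalid pool name: " + e.getMessage());
  }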

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java Fri Sep  6 18:52:50 2013
@@ -18,45 +18,38 @@
 
 package org.apache.hadoop.hdfs.protocol;
 
+import javax.annotation.Nullable;
+
 import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.permission.FsPermission;
 
-import com.google.common.base.Preconditions;
-
 /**
  * Information about a cache pool.
- * 
- * CachePoolInfo permissions roughly map to Unix file permissions.
- * Write permissions allow addition and removal of a {@link PathCacheEntry} from
- * the pool. Execute permissions allow listing of PathCacheEntries in a pool.
- * Read permissions have no associated meaning.
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class CachePoolInfo {
+  final String poolName;
+
+  @Nullable
+  String ownerName;
+
+  @Nullable
+  String groupName;
+
+  @Nullable
+  FsPermission mode;
+
+  @Nullable
+  Integer weight;
 
-  private String poolName;
-  private String ownerName;
-  private String groupName;
-  private FsPermission mode;
-  private Integer weight;
-
-  /**
-   * For Builder use
-   */
-  private CachePoolInfo() {}
-
-  /**
-   * Use a CachePoolInfo {@link Builder} to create a new CachePoolInfo with
-   * more parameters
-   */
   public CachePoolInfo(String poolName) {
     this.poolName = poolName;
   }
-
+  
   public String getPoolName() {
     return poolName;
   }
@@ -65,103 +58,73 @@ public class CachePoolInfo {
     return ownerName;
   }
 
+  public CachePoolInfo setOwnerName(String ownerName) {
+    this.ownerName = ownerName;
+    return this;
+  }
+
   public String getGroupName() {
     return groupName;
   }
 
+  public CachePoolInfo setGroupName(String groupName) {
+    this.groupName = groupName;
+    return this;
+  }
+  
   public FsPermission getMode() {
     return mode;
   }
 
+  public CachePoolInfo setMode(FsPermission mode) {
+    this.mode = mode;
+    return this;
+  }
+
   public Integer getWeight() {
     return weight;
   }
 
-  public String toString() {
-    return new StringBuilder().
-        append("{ ").append("poolName:").append(poolName).
-        append(", ownerName:").append(ownerName).
-        append(", groupName:").append(groupName).
-        append(", mode:").append(mode).
-        append(", weight:").append(weight).
-        append(" }").toString();
+  public CachePoolInfo setWeight(Integer weight) {
+    this.weight = weight;
+    return this;
   }
 
-  @Override
-  public int hashCode() {
-    return new HashCodeBuilder().append(poolName).append(ownerName)
-        .append(groupName).append(mode.toShort()).append(weight).hashCode();
+  public String toString() {
+    return new StringBuilder().append("{").
+      append("poolName:").append(poolName).
+      append(", ownerName:").append(ownerName).
+      append(", groupName:").append(groupName).
+      append(", mode:").append((mode == null) ? "null" :
+          String.format("0%03o", mode)).
+      append(", weight:").append(weight).
+      append("}").toString();
   }
-
+  
   @Override
-  public boolean equals(Object obj) {
-    if (obj == null) { return false; }
-    if (obj == this) { return true; }
-    if (obj.getClass() != getClass()) {
+  public boolean equals(Object o) {
+    try {
+      CachePoolInfo other = (CachePoolInfo)o;
+      return new EqualsBuilder().
+          append(poolName, other.poolName).
+          append(ownerName, other.ownerName).
+          append(groupName, other.groupName).
+          append(mode, other.mode).
+          append(weight, other.weight).
+          isEquals();
+    } catch (ClassCastException e) {
       return false;
     }
-    CachePoolInfo rhs = (CachePoolInfo)obj;
-    return new EqualsBuilder()
-      .append(poolName, rhs.poolName)
-      .append(ownerName, rhs.ownerName)
-      .append(groupName, rhs.groupName)
-      .append(mode, rhs.mode)
-      .append(weight, rhs.weight)
-      .isEquals();
-  }
-
-  public static Builder newBuilder() {
-    return new Builder();
-  }
-
-  public static Builder newBuilder(CachePoolInfo info) {
-    return new Builder(info);
   }
 
-  /**
-   * CachePoolInfo Builder
-   */
-  public static class Builder {
-    private CachePoolInfo info;
-
-    public Builder() {
-      this.info = new CachePoolInfo();
-    }
-
-    public Builder(CachePoolInfo info) {
-      this.info = info;
-    }
-
-    public CachePoolInfo build() {
-      Preconditions.checkNotNull(info.poolName,
-          "Cannot create a CachePoolInfo without a pool name");
-      return info;
-    }
-
-    public Builder setPoolName(String poolName) {
-      info.poolName = poolName;
-      return this;
-    }
-
-    public Builder setOwnerName(String ownerName) {
-      info.ownerName = ownerName;
-      return this;
-    }
-
-    public Builder setGroupName(String groupName) {
-      info.groupName = groupName;
-      return this;
-    }
-
-    public Builder setMode(FsPermission mode) {
-      info.mode = mode;
-      return this;
-    }
-
-    public Builder setWeight(Integer weight) {
-      info.weight = weight;
-      return this;
-    }
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder().
+        append(poolName).
+        append(ownerName).
+        append(groupName).
+        append(mode).
+        append(weight).
+        hashCode();
   }
-
-}
\ No newline at end of file
+}
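
[Editor's note: with the Builder gone, CachePoolInfo is configured through chainable setters; only the pool name is required, and every other field is @Nullable so an unset field can mean "not specified" in an add or modify request. A usage sketch, with the pool name and values invented:]

  CachePoolInfo info = new CachePoolInfo("pool1").
      setOwnerName("bob").
      setGroupName("bobgroup").
      setMode(new FsPermission((short)0755)).
      setWeight(150);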

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java Fri Sep  6 18:52:50 2013
@@ -38,7 +38,6 @@ import org.apache.hadoop.hdfs.DFSConfigK
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSelector;
-import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
 import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
 import org.apache.hadoop.io.EnumSetWritable;
@@ -1107,8 +1106,9 @@ public interface ClientProtocol {
    *         could not be added.
    */
   @AtMostOnce
-  public List<Fallible<PathCacheEntry>> addPathCacheDirectives(
-      List<PathCacheDirective> directives) throws IOException;
+  public List<Fallible<PathCacheEntry>>
+    addPathCacheDirectives(List<PathCacheDirective> directives)
+      throws IOException;
 
   /**
    * Remove some path cache entries from the CacheManager.
@@ -1117,7 +1117,7 @@ public interface ClientProtocol {
    * @return A Fallible list where each element is either a successfully removed
    *         ID, or an IOException describing why the ID could not be removed.
    */
-  @Idempotent
+  @AtMostOnce
   public List<Fallible<Long>> removePathCacheEntries(List<Long> ids)
       throws IOException;
 
@@ -1127,15 +1127,13 @@ public interface ClientProtocol {
    * 
    * @param prevId The last listed entry ID, or -1 if this is the first call to
    *          listPathCacheEntries.
-   * @param pool The cache pool to list, or -1 to list all pools
-   * @param maxRepliesPerRequest The maximum number of entries to return per
-   *          request
+   * @param pool The cache pool to list, or the empty string to list all pools
    * @return A RemoteIterator which returns PathCacheEntry objects.
    */
   @Idempotent
   public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
-      long poolId, int maxRepliesPerRequest) throws IOException;
-
+      String pool) throws IOException;
+  
   /**
    * Add a new cache pool.
    * 
@@ -1143,39 +1141,37 @@ public interface ClientProtocol {
    * @throws IOException If the request could not be completed.
    */
   @AtMostOnce
-  public CachePool addCachePool(CachePoolInfo info) throws IOException;
+  public void addCachePool(CachePoolInfo info) throws IOException;
 
   /**
-   * Modify a cache pool, e.g. pool name, permissions, owner, group.
-   * 
-   * @param poolId ID of the cache pool to modify
-   * @param info New metadata for the cache pool
-   * @throws IOException If the request could not be completed.
+   * Modify a cache pool.
+   *
+   * @param req
+   *          The request to modify a cache pool.
+   * @throws IOException 
+   *          If the request could not be completed.
    */
   @AtMostOnce
-  public void modifyCachePool(long poolId, CachePoolInfo info)
-      throws IOException;
-
+  public void modifyCachePool(CachePoolInfo req) throws IOException;
+  
   /**
    * Remove a cache pool.
    * 
-   * @param poolId ID of the cache pool to remove.
+   * @param pool name of the cache pool to remove.
    * @throws IOException if the cache pool did not exist, or could not be
    *           removed.
    */
-  @Idempotent
-  public void removeCachePool(long poolId) throws IOException;
+  @AtMostOnce
+  public void removeCachePool(String pool) throws IOException;
 
   /**
    * List the set of cache pools. Incrementally fetches results from the server.
    * 
-   * @param prevPoolId ID of the last pool listed, or -1 if this is the first
-   *          invocation of listCachePools
-   * @param maxRepliesPerRequest Maximum number of cache pools to return per
-   *          server request.
+   * @param prevPool name of the last pool listed, or the empty string if this is
+   *          the first invocation of listCachePools
    * @return A RemoteIterator which returns CachePool objects.
    */
   @Idempotent
-  public RemoteIterator<CachePool> listCachePools(long prevPoolId,
-      int maxRepliesPerRequest) throws IOException;
+  public RemoteIterator<CachePoolInfo> listCachePools(String prevPool)
+      throws IOException;
 }

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java Fri Sep  6 18:52:50 2013
@@ -25,7 +25,7 @@ import com.google.common.collect.Compari
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolError;
+import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
 import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
 
 /**
@@ -33,13 +33,14 @@ import org.apache.hadoop.hdfs.protocol.A
  */
 public class PathCacheDirective implements Comparable<PathCacheDirective> {
   private final String path;
-  private final long poolId;
 
-  public PathCacheDirective(String path, long poolId) {
+  private final String pool;
+
+  public PathCacheDirective(String path, String pool) {
     Preconditions.checkNotNull(path);
-    Preconditions.checkArgument(poolId > 0);
+    Preconditions.checkNotNull(pool);
     this.path = path;
-    this.poolId = poolId;
+    this.pool = pool;
   }
 
   /**
@@ -52,8 +53,8 @@ public class PathCacheDirective implemen
   /**
    * @return The pool used in this request.
    */
-  public long getPoolId() {
-    return poolId;
+  public String getPool() {
+    return pool;
   }
 
   /**
@@ -69,22 +70,22 @@ public class PathCacheDirective implemen
     if (!DFSUtil.isValidName(path)) {
       throw new InvalidPathNameError(this);
     }
-    if (poolId <= 0) {
-      throw new InvalidPoolError(this);
+    if (pool.isEmpty()) {
+      throw new InvalidPoolNameError(this);
     }
   }
 
   @Override
   public int compareTo(PathCacheDirective rhs) {
     return ComparisonChain.start().
-        compare(poolId, rhs.getPoolId()).
+        compare(pool, rhs.getPool()).
         compare(path, rhs.getPath()).
         result();
   }
 
   @Override
   public int hashCode() {
-    return new HashCodeBuilder().append(path).append(poolId).hashCode();
+    return new HashCodeBuilder().append(path).append(pool).hashCode();
   }
 
   @Override
@@ -101,7 +102,7 @@ public class PathCacheDirective implemen
   public String toString() {
     StringBuilder builder = new StringBuilder();
     builder.append("{ path:").append(path).
-      append(", poolId:").append(poolId).
+      append(", pool:").append(pool).
       append(" }");
     return builder.toString();
   }
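
[Editor's note: a directive is now the (path, pool name) pair, ordered by pool and then path so that one pool's entries list contiguously. For example, with an invented path and pool:]

  PathCacheDirective d = new PathCacheDirective("/datasets/hot", "pool1");
  d.validate();  // throws InvalidPoolNameError if the pool name is empty,
                 // InvalidPathNameError if the path is not a valid HDFS name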

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java Fri Sep  6 18:52:50 2013
@@ -27,9 +27,11 @@ import org.apache.hadoop.fs.ContentSumma
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
 import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolError;
+import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
@@ -112,6 +114,7 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesElementProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
@@ -171,6 +174,7 @@ import org.apache.hadoop.hdfs.security.t
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
+import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
 import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
@@ -1035,19 +1039,16 @@ public class ClientNamenodeProtocolServe
   }
 
   @Override
-  public AddPathCacheDirectivesResponseProto addPathCacheDirectives(
-      RpcController controller, AddPathCacheDirectivesRequestProto request)
-          throws ServiceException {
+  public AddPathCacheDirectivesResponseProto addPathCacheDirectives(RpcController controller,
+      AddPathCacheDirectivesRequestProto request) throws ServiceException {
     try {
       ArrayList<PathCacheDirective> input =
           new ArrayList<PathCacheDirective>(request.getElementsCount());
       for (int i = 0; i < request.getElementsCount(); i++) {
         PathCacheDirectiveProto proto = request.getElements(i);
-        input.add(new PathCacheDirective(proto.getPath(),
-            proto.getPool().getId()));
+        input.add(new PathCacheDirective(proto.getPath(), proto.getPool()));
       }
-      List<Fallible<PathCacheEntry>> output = server
-          .addPathCacheDirectives(input);
+      List<Fallible<PathCacheEntry>> output = server.addPathCacheDirectives(input);
       AddPathCacheDirectivesResponseProto.Builder builder =
          AddPathCacheDirectivesResponseProto.newBuilder();
       for (int idx = 0; idx < output.size(); idx++) {
@@ -1060,7 +1061,7 @@ public class ClientNamenodeProtocolServe
         } catch (InvalidPathNameError ioe) {
           builder.addResults(AddPathCacheDirectiveErrorProto.
               INVALID_PATH_NAME_ERROR_VALUE);
-        } catch (InvalidPoolError ioe) {
+        } catch (InvalidPoolNameError ioe) {
           builder.addResults(AddPathCacheDirectiveErrorProto.
               INVALID_POOL_NAME_ERROR_VALUE);
         } catch (IOException ioe) {
@@ -1108,21 +1109,20 @@ public class ClientNamenodeProtocolServe
   }
 
   @Override
-  public ListPathCacheEntriesResponseProto listPathCacheEntries(
-      RpcController controller, ListPathCacheEntriesRequestProto request)
-      throws ServiceException {
+  public ListPathCacheEntriesResponseProto listPathCacheEntries(RpcController controller,
+      ListPathCacheEntriesRequestProto request) throws ServiceException {
     try {
-      CachePool pool = PBHelper.convert(request.getPool());
       RemoteIterator<PathCacheEntry> iter =
-         server.listPathCacheEntries(
-             PBHelper.convert(request.getPrevEntry()).getEntryId(),
-             pool.getId(),
-             request.getMaxReplies());
+         server.listPathCacheEntries(request.getPrevId(), request.getPool());
       ListPathCacheEntriesResponseProto.Builder builder =
           ListPathCacheEntriesResponseProto.newBuilder();
       while (iter.hasNext()) {
         PathCacheEntry entry = iter.next();
-        builder.addEntries(PBHelper.convert(entry));
+        builder.addElements(
+            ListPathCacheEntriesElementProto.newBuilder().
+              setId(entry.getEntryId()).
+              setPath(entry.getDirective().getPath()).
+              setPool(entry.getDirective().getPool()));
       }
       return builder.build();
     } catch (IOException e) {
@@ -1134,20 +1134,46 @@ public class ClientNamenodeProtocolServe
   public AddCachePoolResponseProto addCachePool(RpcController controller,
       AddCachePoolRequestProto request) throws ServiceException {
     try {
-      server.addCachePool(PBHelper.convert(request.getInfo()));
+      CachePoolInfo info =
+          new CachePoolInfo(request.getPoolName());
+      if (request.hasOwnerName()) {
+        info.setOwnerName(request.getOwnerName());
+      }
+      if (request.hasGroupName()) {
+        info.setGroupName(request.getGroupName());
+      }
+      if (request.hasMode()) {
+        info.setMode(new FsPermission((short)request.getMode()));
+      }
+      if (request.hasWeight()) {
+        info.setWeight(request.getWeight());
+      }
+      server.addCachePool(info);
       return AddCachePoolResponseProto.newBuilder().build();
     } catch (IOException e) {
       throw new ServiceException(e);
     }
   }
-
+  
   @Override
   public ModifyCachePoolResponseProto modifyCachePool(RpcController controller,
       ModifyCachePoolRequestProto request) throws ServiceException {
     try {
-      server.modifyCachePool(
-          PBHelper.convert(request.getPool()).getId(),
-          PBHelper.convert(request.getInfo()));
+      CachePoolInfo info =
+          new CachePoolInfo(request.getPoolName());
+      if (request.hasOwnerName()) {
+        info.setOwnerName(request.getOwnerName());
+      }
+      if (request.hasGroupName()) {
+        info.setGroupName(request.getGroupName());
+      }
+      if (request.hasMode()) {
+        info.setMode(new FsPermission((short)request.getMode()));
+      }
+      if (request.hasWeight()) {
+        info.setWeight(request.getWeight());
+      }
+      server.modifyCachePool(info);
       return ModifyCachePoolResponseProto.newBuilder().build();
     } catch (IOException e) {
       throw new ServiceException(e);
@@ -1158,7 +1184,7 @@ public class ClientNamenodeProtocolServe
   public RemoveCachePoolResponseProto removeCachePool(RpcController controller,
       RemoveCachePoolRequestProto request) throws ServiceException {
     try {
-      server.removeCachePool(PBHelper.convert(request.getPool()).getId());
+      server.removeCachePool(request.getPoolName());
       return RemoveCachePoolResponseProto.newBuilder().build();
     } catch (IOException e) {
       throw new ServiceException(e);
@@ -1169,16 +1195,27 @@ public class ClientNamenodeProtocolServe
   public ListCachePoolsResponseProto listCachePools(RpcController controller,
       ListCachePoolsRequestProto request) throws ServiceException {
     try {
-      RemoteIterator<CachePool> iter =
-        server.listCachePools(PBHelper.convert(request.getPrevPool()).getId(),
-            request.getMaxReplies());
+      RemoteIterator<CachePoolInfo> iter =
+        server.listCachePools(request.getPrevPoolName());
       ListCachePoolsResponseProto.Builder responseBuilder =
         ListCachePoolsResponseProto.newBuilder();
       while (iter.hasNext()) {
-        CachePool pool = iter.next();
-        ListCachePoolsResponseElementProto.Builder elemBuilder =
+        CachePoolInfo pool = iter.next();
+        ListCachePoolsResponseElementProto.Builder elemBuilder = 
             ListCachePoolsResponseElementProto.newBuilder();
-        elemBuilder.setPool(PBHelper.convert(pool));
+        elemBuilder.setPoolName(pool.getPoolName());
+        if (pool.getOwnerName() != null) {
+          elemBuilder.setOwnerName(pool.getOwnerName());
+        }
+        if (pool.getGroupName() != null) {
+          elemBuilder.setGroupName(pool.getGroupName());
+        }
+        if (pool.getMode() != null) {
+          elemBuilder.setMode(pool.getMode().toShort());
+        }
+        if (pool.getWeight() != null) {
+          elemBuilder.setWeight(pool.getWeight());
+        }
         responseBuilder.addElements(elemBuilder.build());
       }
       return responseBuilder.build();
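
[Editor's note: since every CachePoolInfo field except the name is optional on the wire, the translator copies request fields only when hasX() is true, and sets response fields only when the value is non-null; copying unconditionally would turn an absent field into a protobuf default ("" or 0) and lose the distinction from "unset". The guard pattern, condensed:]

  CachePoolInfo info = new CachePoolInfo(request.getPoolName());
  if (request.hasOwnerName()) {
    info.setOwnerName(request.getOwnerName());  // copy only what the client set
  }
  // ...and likewise for groupName, mode, and weight.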

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java Fri Sep  6 18:52:50 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.NoSuchElementException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -37,12 +38,17 @@ import org.apache.hadoop.fs.ParentNotDir
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
+import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
 import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
 import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolError;
+import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
 import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.UnexpectedAddPathCacheDirectiveException;
-import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
+import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
+import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.UnexpectedRemovePathCacheEntryException;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -55,18 +61,14 @@ import org.apache.hadoop.hdfs.protocol.H
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.UnexpectedRemovePathCacheEntryException;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheDirectiveProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectiveErrorProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesResponseProto;
@@ -107,23 +109,23 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesElementProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseElementProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheDirectiveProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheEntryProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntryErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameSnapshotRequestProto;
@@ -144,7 +146,6 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineRequestProto;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
 import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
 import org.apache.hadoop.io.EnumSetWritable;
@@ -1026,7 +1027,7 @@ public class ClientNamenodeProtocolTrans
       return new InvalidPathNameError(directive);
     } else if (code == AddPathCacheDirectiveErrorProto.
         INVALID_POOL_NAME_ERROR_VALUE) {
-      return new InvalidPoolError(directive);
+      return new InvalidPoolNameError(directive);
     } else {
       return new UnexpectedAddPathCacheDirectiveException(directive);
     }
@@ -1041,7 +1042,7 @@ public class ClientNamenodeProtocolTrans
       for (PathCacheDirective directive : directives) {
         builder.addElements(PathCacheDirectiveProto.newBuilder().
             setPath(directive.getPath()).
-            setPool(PBHelper.convert(new CachePool(directive.getPoolId()))).
+            setPool(directive.getPool()).
             build());
       }
       AddPathCacheDirectivesResponseProto result = 
@@ -1120,40 +1121,45 @@ public class ClientNamenodeProtocolTrans
 
     @Override
     public PathCacheEntry get(int i) {
-      PathCacheEntryProto entryProto = response.getEntries(i);
-      return PBHelper.convert(entryProto);
+      ListPathCacheEntriesElementProto elementProto =
+        response.getElements(i);
+      return new PathCacheEntry(elementProto.getId(), 
+          new PathCacheDirective(elementProto.getPath(),
+              elementProto.getPool()));
     }
 
     @Override
     public int size() {
-      return response.getEntriesCount();
+      return response.getElementsCount();
+    }
+    
+    @Override
+    public boolean hasMore() {
+      return response.getHasMore();
     }
   }
 
   private class PathCacheEntriesIterator
       extends BatchedRemoteIterator<Long, PathCacheEntry> {
-    private final long poolId;
+    private final String pool;
 
-    public PathCacheEntriesIterator(long prevKey, int maxRepliesPerRequest,
-        long poolId) {
-      super(prevKey, maxRepliesPerRequest);
-      this.poolId = poolId;
+    public PathCacheEntriesIterator(long prevKey, String pool) {
+      super(prevKey);
+      this.pool = pool;
     }
 
     @Override
     public BatchedEntries<PathCacheEntry> makeRequest(
-        Long prevEntryId, int maxRepliesPerRequest) throws IOException {
+        Long nextKey) throws IOException {
       ListPathCacheEntriesResponseProto response;
       try {
         ListPathCacheEntriesRequestProto req =
             ListPathCacheEntriesRequestProto.newBuilder().
-              setPrevEntry(
-                  PBHelper.convert(new PathCacheEntry(prevEntryId, null))).
-              setPool(PBHelper.convert(new CachePool(poolId))).
-              setMaxReplies(maxRepliesPerRequest).
+              setPrevId(nextKey).
+              setPool(pool).
               build();
         response = rpcProxy.listPathCacheEntries(null, req);
-        if (response.getEntriesCount() == 0) {
+        if (response.getElementsCount() == 0) {
           response = null;
         }
       } catch (ServiceException e) {
@@ -1170,30 +1176,51 @@ public class ClientNamenodeProtocolTrans
 
   @Override
   public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
-      long poolId, int repliesPerRequest) throws IOException {
-    return new PathCacheEntriesIterator(prevId, repliesPerRequest, poolId);
+      String pool) throws IOException {
+    return new PathCacheEntriesIterator(prevId, pool);
   }
 
   @Override
-  public CachePool addCachePool(CachePoolInfo info) throws IOException {
-    AddCachePoolRequestProto.Builder builder =
+  public void addCachePool(CachePoolInfo info) throws IOException {
+    AddCachePoolRequestProto.Builder builder = 
         AddCachePoolRequestProto.newBuilder();
-    builder.setInfo(PBHelper.convert(info));
+    builder.setPoolName(info.getPoolName());
+    if (info.getOwnerName() != null) {
+      builder.setOwnerName(info.getOwnerName());
+    }
+    if (info.getGroupName() != null) {
+      builder.setGroupName(info.getGroupName());
+    }
+    if (info.getMode() != null) {
+      builder.setMode(info.getMode().toShort());
+    }
+    if (info.getWeight() != null) {
+      builder.setWeight(info.getWeight());
+    }
     try {
-      return PBHelper.convert(
-          rpcProxy.addCachePool(null, builder.build()).getPool());
+      rpcProxy.addCachePool(null, builder.build());
     } catch (ServiceException e) {
       throw ProtobufHelper.getRemoteException(e);
     }
   }
 
   @Override
-  public void modifyCachePool(long poolId, CachePoolInfo info)
-      throws IOException {
-    ModifyCachePoolRequestProto.Builder builder =
-        ModifyCachePoolRequestProto.newBuilder()
-        .setPool(PBHelper.convert(new CachePool(poolId)))
-        .setInfo(PBHelper.convert(info));
+  public void modifyCachePool(CachePoolInfo req) throws IOException {
+    ModifyCachePoolRequestProto.Builder builder = 
+        ModifyCachePoolRequestProto.newBuilder();
+    builder.setPoolName(req.getPoolName());
+    if (req.getOwnerName() != null) {
+      builder.setOwnerName(req.getOwnerName());
+    }
+    if (req.getGroupName() != null) {
+      builder.setGroupName(req.getGroupName());
+    }
+    if (req.getMode() != null) {
+      builder.setMode(req.getMode().toShort());
+    }
+    if (req.getWeight() != null) {
+      builder.setWeight(req.getWeight());
+    }
     try {
       rpcProxy.modifyCachePool(null, builder.build());
     } catch (ServiceException e) {
@@ -1202,69 +1229,74 @@ public class ClientNamenodeProtocolTrans
   }
 
   @Override
-  public void removeCachePool(long poolId) throws IOException {
+  public void removeCachePool(String cachePoolName) throws IOException {
     try {
-      rpcProxy.removeCachePool(null,
+      rpcProxy.removeCachePool(null, 
           RemoveCachePoolRequestProto.newBuilder().
-          setPool(PBHelper.convert(new CachePool(poolId))).
-          build());
+            setPoolName(cachePoolName).build());
     } catch (ServiceException e) {
       throw ProtobufHelper.getRemoteException(e);
     }
   }
 
   private static class BatchedPathDirectiveEntries
-      implements BatchedEntries<CachePool> {
-
+      implements BatchedEntries<CachePoolInfo> {
     private final ListCachePoolsResponseProto proto;
-
+    
     public BatchedPathDirectiveEntries(ListCachePoolsResponseProto proto) {
       this.proto = proto;
     }
-
+      
     @Override
-    public CachePool get(int i) {
+    public CachePoolInfo get(int i) {
       ListCachePoolsResponseElementProto elem = proto.getElements(i);
-      return PBHelper.convert(elem.getPool());
+      return new CachePoolInfo(elem.getPoolName()).
+          setOwnerName(elem.getOwnerName()).
+          setGroupName(elem.getGroupName()).
+          setMode(new FsPermission((short)elem.getMode())).
+          setWeight(elem.getWeight());
     }
 
     @Override
     public int size() {
       return proto.getElementsCount();
     }
+    
+    @Override
+    public boolean hasMore() {
+      return proto.getHasMore();
+    }
   }
+  
+  private class CachePoolIterator 
+      extends BatchedRemoteIterator<String, CachePoolInfo> {
 
-  private class CachePoolIterator
-      extends BatchedRemoteIterator<Long, CachePool> {
-
-    public CachePoolIterator(Long prevKey, int maxRepliesPerRequest) {
-      super(prevKey, maxRepliesPerRequest);
+    public CachePoolIterator(String prevKey) {
+      super(prevKey);
     }
 
     @Override
-    public BatchedEntries<CachePool> makeRequest(Long prevKey,
-        int maxRepliesPerRequest) throws IOException {
+    public BatchedEntries<CachePoolInfo> makeRequest(String prevKey)
+        throws IOException {
       try {
         return new BatchedPathDirectiveEntries(
-            rpcProxy.listCachePools(null,
+            rpcProxy.listCachePools(null, 
               ListCachePoolsRequestProto.newBuilder().
-                setPrevPool(PBHelper.convert(new CachePool(prevKey))).
-                setMaxReplies(maxRepliesPerRequest).
-                build()));
+                setPrevPoolName(prevKey).build()));
       } catch (ServiceException e) {
         throw ProtobufHelper.getRemoteException(e);
       }
     }
 
     @Override
-    public Long elementToPrevKey(CachePool element) {
-      return element.getId();
+    public String elementToPrevKey(CachePoolInfo element) {
+      return element.getPoolName();
     }
   }
 
   @Override
-  public RemoteIterator<CachePool> listCachePools(long prevPoolId,
-      int maxRepliesPerRequest) throws IOException {
-    return new CachePoolIterator(prevPoolId, maxRepliesPerRequest);
+  public RemoteIterator<CachePoolInfo> listCachePools(String prevKey)
+      throws IOException {
+    return new CachePoolIterator(prevKey);
   }
 }
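
[Editor's note: with maxRepliesPerRequest removed, batch size is purely a server-side limit and the iterators only carry a cursor. A stripped-down sketch of the contract these BatchedRemoteIterator subclasses fulfil; the real base class in org.apache.hadoop.fs may differ in detail:]

  public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {
    // Fetch the batch of elements that follows prevKey.
    public abstract BatchedEntries<E> makeRequest(K prevKey) throws IOException;
    // Derive the cursor for the next request from the last element seen.
    public abstract K elementToPrevKey(E element);
  }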

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java Fri Sep  6 18:52:50 2013
@@ -32,13 +32,10 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
-import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
@@ -53,15 +50,9 @@ import org.apache.hadoop.hdfs.protocol.L
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateFlagProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DatanodeReportTypeProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFsStatsResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheDirectiveProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheEntryProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SafeModeActionProto;
 import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BalancerBandwidthCommandProto;
 import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockCommandProto;
@@ -123,7 +114,6 @@ import org.apache.hadoop.hdfs.security.t
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
-import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.hdfs.server.protocol.BalancerBandwidthCommand;
@@ -1503,74 +1493,6 @@ public class PBHelper {
     return HdfsProtos.ChecksumTypeProto.valueOf(type.id);
   }
 
-  public static PathCacheDirective convert(
-      PathCacheDirectiveProto directiveProto) {
-    CachePool pool = convert(directiveProto.getPool());
-    return new PathCacheDirective(directiveProto.getPath(), pool.getId());
-  }
-
-  public static PathCacheDirectiveProto convert(PathCacheDirective directive) {
-    PathCacheDirectiveProto.Builder builder = 
-        PathCacheDirectiveProto.newBuilder()
-        .setPath(directive.getPath())
-        .setPool(PBHelper.convert(new CachePool(directive.getPoolId())));
-    return builder.build();
-  }
-
-  public static PathCacheEntry convert(PathCacheEntryProto entryProto) {
-    long entryId = entryProto.getId();
-    PathCacheDirective directive = convert(entryProto.getDirective());
-    return new PathCacheEntry(entryId, directive);
-  }
-
-  public static PathCacheEntryProto convert(PathCacheEntry entry) {
-    PathCacheEntryProto.Builder builder = PathCacheEntryProto.newBuilder()
-        .setId(entry.getEntryId())
-        .setDirective(PBHelper.convert(entry.getDirective()));
-    return builder.build();
-  }
-
-  public static CachePoolInfo convert(CachePoolInfoProto infoProto) {
-    CachePoolInfo.Builder builder =
-        CachePoolInfo.newBuilder().setPoolName(infoProto.getPoolName());
-    if (infoProto.hasOwnerName()) {
-      builder.setOwnerName(infoProto.getOwnerName());
-    }
-    if (infoProto.hasGroupName()) {
-      builder.setGroupName(infoProto.getGroupName());
-    }
-    if (infoProto.hasMode()) {
-      builder.setMode(new FsPermission((short) infoProto.getMode()));
-    }
-    if (infoProto.hasWeight()) {
-      builder.setWeight(infoProto.getWeight());
-    }
-    return builder.build();
-  }
-
-  public static CachePoolInfoProto convert(CachePoolInfo info) {
-    CachePoolInfoProto.Builder builder = CachePoolInfoProto.newBuilder()
-        .setPoolName(info.getPoolName())
-        .setOwnerName(info.getOwnerName())
-        .setGroupName(info.getGroupName())
-        .setMode(info.getMode().toShort())
-        .setWeight(info.getWeight());
-    return builder.build();
-  }
-
-  public static CachePool convert(CachePoolProto poolProto) {
-    CachePoolInfo info = convert(poolProto.getInfo());
-    CachePool pool = new CachePool(poolProto.getId(), info);
-    return pool;
-  }
-
-  public static CachePoolProto convert(CachePool pool) {
-    CachePoolProto.Builder builder = CachePoolProto.newBuilder()
-        .setId(pool.getId())
-        .setInfo(convert(pool.getInfo()));
-    return builder.build();
-  }
-
   public static InputStream vintPrefixed(final InputStream input)
       throws IOException {
     final int firstByte = input.read();

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java Fri Sep  6 18:52:50 2013
@@ -17,28 +17,34 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
+import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
 import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.PoolWritePermissionDeniedError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.UnexpectedAddPathCacheDirectiveException;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
 import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
+import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.UnexpectedAddPathCacheDirectiveException;
+import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.PoolWritePermissionDeniedError;
 import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
 import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
 import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.UnexpectedRemovePathCacheEntryException;
+import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
 import org.apache.hadoop.util.Fallible;
 
 /**
@@ -65,62 +71,58 @@ final class CacheManager {
   /**
    * Cache pools, sorted by name.
    */
-  private final TreeMap<String, CachePool> cachePoolsByName =
+  private final TreeMap<String, CachePool> cachePools =
       new TreeMap<String, CachePool>();
 
   /**
-   * Cache pools, sorted by ID
+   * The entry ID to use for a new entry.
    */
-  private final TreeMap<Long, CachePool> cachePoolsById =
-      new TreeMap<Long, CachePool>();
+  private long nextEntryId;
 
   /**
-   * The entry ID to use for a new entry.
+   * Maximum number of cache pools to list in one operation.
    */
-  private long nextEntryId;
+  private final int maxListCachePoolsResponses;
 
   /**
-   * The pool ID to use for a new pool.
+   * Maximum number of cache pool directives to list in one operation.
    */
-  private long nextPoolId;
+  private final int maxListCacheDirectivesResponses;
 
   CacheManager(FSDirectory dir, Configuration conf) {
     // TODO: support loading and storing of the CacheManager state
     clear();
+    maxListCachePoolsResponses = conf.getInt(
+        DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES,
+        DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT);
+    maxListCacheDirectivesResponses = conf.getInt(
+        DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES,
+        DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT);
   }
 
   synchronized void clear() {
     entriesById.clear();
     entriesByDirective.clear();
-    cachePoolsByName.clear();
-    cachePoolsById.clear();
+    cachePools.clear();
     nextEntryId = 1;
-    nextPoolId = 1;
   }
 
   synchronized long getNextEntryId() throws IOException {
     if (nextEntryId == Long.MAX_VALUE) {
-      throw new IOException("no more available entry IDs");
+      throw new IOException("no more available IDs");
     }
     return nextEntryId++;
   }
 
-  synchronized long getNextPoolId() throws IOException {
-    if (nextPoolId == Long.MAX_VALUE) {
-      throw new IOException("no more available pool IDs");
-    }
-    return nextPoolId++;
-  }
-
   private synchronized Fallible<PathCacheEntry> addDirective(
-        FSPermissionChecker pc, PathCacheDirective directive) {
-    CachePool pool = cachePoolsById.get(directive.getPoolId());
+        PathCacheDirective directive, FSPermissionChecker pc) {
+    CachePool pool = cachePools.get(directive.getPool());
     if (pool == null) {
       LOG.info("addDirective " + directive + ": pool not found.");
       return new Fallible<PathCacheEntry>(
-          new InvalidPoolError(directive));
+          new InvalidPoolNameError(directive));
     }
-    if (!pc.checkPermission(pool, FsAction.WRITE)) {
+    if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
       LOG.info("addDirective " + directive + ": write permission denied.");
       return new Fallible<PathCacheEntry>(
           new PoolWritePermissionDeniedError(directive));
@@ -155,17 +157,17 @@ final class CacheManager {
   }
 
   public synchronized List<Fallible<PathCacheEntry>> addDirectives(
-      FSPermissionChecker pc, List<PathCacheDirective> directives) {
+      List<PathCacheDirective> directives, FSPermissionChecker pc) {
     ArrayList<Fallible<PathCacheEntry>> results = 
         new ArrayList<Fallible<PathCacheEntry>>(directives.size());
     for (PathCacheDirective directive: directives) {
-      results.add(addDirective(pc, directive));
+      results.add(addDirective(directive, pc));
     }
     return results;
   }
 
-  private synchronized Fallible<Long> removeEntry(FSPermissionChecker pc,
-        long entryId) {
+  private synchronized Fallible<Long> removeEntry(long entryId,
+        FSPermissionChecker pc) {
     // Check for invalid IDs.
     if (entryId <= 0) {
       LOG.info("removeEntry " + entryId + ": invalid non-positive entry ID.");
@@ -177,20 +179,20 @@ final class CacheManager {
       LOG.info("removeEntry " + entryId + ": entry not found.");
       return new Fallible<Long>(new NoSuchIdException(entryId));
     }
-    CachePool pool = cachePoolsById.get(existing.getDirective().getPoolId());
+    CachePool pool = cachePools.get(existing.getDirective().getPool());
     if (pool == null) {
       LOG.info("removeEntry " + entryId + ": pool not found for directive " +
         existing.getDirective());
       return new Fallible<Long>(
           new UnexpectedRemovePathCacheEntryException(entryId));
     }
-    if (!pc.checkPermission(pool, FsAction.WRITE)) {
+    if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
       LOG.info("removeEntry " + entryId + ": write permission denied to " +
           "pool " + pool + " for entry " + existing);
       return new Fallible<Long>(
           new RemovePermissionDeniedException(entryId));
     }
-
+    
     // Remove the corresponding entry in entriesByDirective.
     if (entriesByDirective.remove(existing.getDirective()) == null) {
       LOG.warn("removeEntry " + entryId + ": failed to find existing entry " +
@@ -202,41 +204,43 @@ final class CacheManager {
     return new Fallible<Long>(entryId);
   }
 
-  public synchronized List<Fallible<Long>> removeEntries(FSPermissionChecker pc,
-      List<Long> entryIds) {
+  public synchronized List<Fallible<Long>> removeEntries(List<Long> entryIds,
+      FSPermissionChecker pc) {
     ArrayList<Fallible<Long>> results = 
         new ArrayList<Fallible<Long>>(entryIds.size());
     for (Long entryId : entryIds) {
-      results.add(removeEntry(pc, entryId));
+      results.add(removeEntry(entryId, pc));
     }
     return results;
   }
 
-  public synchronized List<PathCacheEntry> listPathCacheEntries(
-      FSPermissionChecker pc, long prevId, Long poolId, int maxReplies) {
-    final int MAX_PRE_ALLOCATED_ENTRIES = 16;
-    ArrayList<PathCacheEntry> replies = new ArrayList<PathCacheEntry>(
-        Math.min(MAX_PRE_ALLOCATED_ENTRIES, maxReplies));
+  public synchronized BatchedListEntries<PathCacheEntry> 
+        listPathCacheEntries(long prevId, String filterPool, FSPermissionChecker pc) {
+    final int NUM_PRE_ALLOCATED_ENTRIES = 16;
+    ArrayList<PathCacheEntry> replies =
+        new ArrayList<PathCacheEntry>(NUM_PRE_ALLOCATED_ENTRIES);
     int numReplies = 0;
     SortedMap<Long, PathCacheEntry> tailMap = entriesById.tailMap(prevId + 1);
-    for (PathCacheEntry entry : tailMap.values()) {
-      if (numReplies >= maxReplies) {
-        return replies;
+    for (Entry<Long, PathCacheEntry> cur : tailMap.entrySet()) {
+      if (numReplies >= maxListCacheDirectivesResponses) {
+        return new BatchedListEntries<PathCacheEntry>(replies, true);
+      }
+      PathCacheEntry curEntry = cur.getValue();
+      if (!filterPool.isEmpty() &&
+          !curEntry.getDirective().getPool().equals(filterPool)) {
+        continue;
       }
-      long entryPoolId = entry.getDirective().getPoolId();
-      if (poolId == null || poolId <= 0 || entryPoolId == poolId) {
-        if (pc.checkPermission(
-            cachePoolsById.get(entryPoolId), FsAction.EXECUTE)) {
-          replies.add(entry);
-          numReplies++;
-        }
+      CachePool pool = cachePools.get(curEntry.getDirective().getPool());
+      if (pool == null) {
+        LOG.error("invalid pool for PathCacheEntry " + curEntry);
+        continue;
+      }
+      if ((pc == null) || pc.checkPermission(pool, FsAction.EXECUTE)) {
+        replies.add(curEntry);
+        numReplies++;
       }
     }
-    return replies;
-  }
-
-  synchronized CachePool getCachePool(long id) {
-    return cachePoolsById.get(id);
+    return new BatchedListEntries<PathCacheEntry>(replies, false);
   }
 
   /**
@@ -246,24 +250,22 @@ final class CacheManager {
    *
    * @param info
    *          The info for the cache pool to create.
-   * @return created CachePool
    */
-  public synchronized CachePool addCachePool(CachePoolInfo info)
+  public synchronized void addCachePool(CachePoolInfo info)
       throws IOException {
     String poolName = info.getPoolName();
-    if (poolName == null || poolName.isEmpty()) {
+    if (poolName.isEmpty()) {
       throw new IOException("invalid empty cache pool name");
     }
-    if (cachePoolsByName.containsKey(poolName)) {
+    CachePool pool = cachePools.get(poolName);
+    if (pool != null) {
       throw new IOException("cache pool " + poolName + " already exists.");
     }
-    CachePool cachePool = new CachePool(getNextPoolId(), poolName,
+    CachePool cachePool = new CachePool(poolName,
       info.getOwnerName(), info.getGroupName(), info.getMode(),
       info.getWeight());
-    cachePoolsById.put(cachePool.getId(), cachePool);
-    cachePoolsByName.put(poolName, cachePool);
+    cachePools.put(poolName, cachePool);
     LOG.info("created new cache pool " + cachePool);
-    return cachePool;
   }
 
   /**
@@ -274,62 +276,46 @@ final class CacheManager {
    * @param info
    *          The info for the cache pool to modify.
    */
-  public synchronized void modifyCachePool(long poolId, CachePoolInfo info)
+  public synchronized void modifyCachePool(CachePoolInfo info)
       throws IOException {
-    if (poolId <= 0) {
-      throw new IOException("invalid pool id " + poolId);
+    String poolName = info.getPoolName();
+    if (poolName.isEmpty()) {
+      throw new IOException("invalid empty cache pool name");
     }
-    if (!cachePoolsById.containsKey(poolId)) {
-      throw new IOException("cache pool id " + poolId + " does not exist.");
+    CachePool pool = cachePools.get(poolName);
+    if (pool == null) {
+      throw new IOException("cache pool " + poolName + " does not exist.");
     }
-    CachePool pool = cachePoolsById.get(poolId);
-    // Remove the old CachePoolInfo
-    removeCachePool(poolId);
-    // Build up the new CachePoolInfo
-    CachePoolInfo.Builder newInfo = CachePoolInfo.newBuilder(pool.getInfo());
     StringBuilder bld = new StringBuilder();
     String prefix = "";
-    if (info.getPoolName() != null) {
-      newInfo.setPoolName(info.getPoolName());
-      bld.append(prefix).
-      append("set name to ").append(info.getOwnerName());
-      prefix = "; ";
-    }
     if (info.getOwnerName() != null) {
-      newInfo.setOwnerName(info.getOwnerName());
+      pool.setOwnerName(info.getOwnerName());
       bld.append(prefix).
         append("set owner to ").append(info.getOwnerName());
       prefix = "; ";
     }
     if (info.getGroupName() != null) {
-      newInfo.setGroupName(info.getGroupName());
+      pool.setGroupName(info.getGroupName());
       bld.append(prefix).
         append("set group to ").append(info.getGroupName());
       prefix = "; ";
     }
     if (info.getMode() != null) {
-      newInfo.setMode(info.getMode());
+      pool.setMode(info.getMode());
       bld.append(prefix).
-        append(String.format("set mode to ", info.getMode()));
+        append(String.format("set mode to 0%3o", info.getMode().toShort()));
       prefix = "; ";
     }
     if (info.getWeight() != null) {
-      newInfo.setWeight(info.getWeight());
+      pool.setWeight(info.getWeight());
       bld.append(prefix).
         append("set weight to ").append(info.getWeight());
       prefix = "; ";
     }
     if (prefix.isEmpty()) {
       bld.append("no changes.");
-    } else {
-      pool.setInfo(newInfo.build());
     }
-    // Put the newly modified info back in
-    cachePoolsById.put(poolId, pool);
-    cachePoolsByName.put(info.getPoolName(), pool);
-    LOG.info("modified pool id " + pool.getId()
-        + " (" + pool.getInfo().getPoolName() + "); "
-        + bld.toString());
+    LOG.info("modified " + poolName + "; " + bld.toString());
   }
 
   /**
@@ -337,39 +323,47 @@ final class CacheManager {
    * 
    * Only the superuser should be able to call this function.
    *
-   * @param poolId
-   *          The id of the cache pool to remove.
+   * @param poolName
+   *          The name for the cache pool to remove.
    */
-  public synchronized void removeCachePool(long poolId) throws IOException {
-    if (!cachePoolsById.containsKey(poolId)) {
-      throw new IOException("can't remove nonexistent cache pool id " + poolId);
-    }
-    // Remove all the entries associated with the pool
-    Iterator<Map.Entry<Long, PathCacheEntry>> it =
-        entriesById.entrySet().iterator();
-    while (it.hasNext()) {
-      Map.Entry<Long, PathCacheEntry> entry = it.next();
-      if (entry.getValue().getDirective().getPoolId() == poolId) {
-        it.remove();
-        entriesByDirective.remove(entry.getValue().getDirective());
+  public synchronized void removeCachePool(String poolName)
+      throws IOException {
+    CachePool pool = cachePools.remove(poolName);
+    if (pool == null) {
+      throw new IOException("can't remove nonexistent cache pool " + poolName);
+    }
+    
+    // Remove entries using this pool
+    // TODO: could optimize this somewhat to avoid the need to iterate
+    // over all entries in entriesByDirective
+    Iterator<Entry<PathCacheDirective, PathCacheEntry>> iter = 
+        entriesByDirective.entrySet().iterator();
+    while (iter.hasNext()) {
+      Entry<PathCacheDirective, PathCacheEntry> entry = iter.next();
+      if (entry.getKey().getPool().equals(poolName)) {
+        entriesById.remove(entry.getValue().getEntryId());
+        iter.remove();
       }
     }
-    // Remove the pool
-    CachePool pool = cachePoolsById.remove(poolId);
-    cachePoolsByName.remove(pool.getInfo().getPoolName());
   }
 
-  public synchronized List<CachePool> listCachePools(Long prevKey,
-      int maxRepliesPerRequest) {
-    final int MAX_PREALLOCATED_REPLIES = 16;
-    ArrayList<CachePool> results = 
-        new ArrayList<CachePool>(Math.min(MAX_PREALLOCATED_REPLIES,
-            maxRepliesPerRequest));
-    SortedMap<Long, CachePool> tailMap =
-        cachePoolsById.tailMap(prevKey, false);
-    for (CachePool pool : tailMap.values()) {
-      results.add(pool);
+  public synchronized BatchedListEntries<CachePoolInfo>
+      listCachePools(FSPermissionChecker pc, String prevKey) {
+    final int NUM_PRE_ALLOCATED_ENTRIES = 16;
+    ArrayList<CachePoolInfo> results = 
+        new ArrayList<CachePoolInfo>(NUM_PRE_ALLOCATED_ENTRIES);
+    SortedMap<String, CachePool> tailMap = cachePools.tailMap(prevKey, false);
+    int numListed = 0;
+    for (Entry<String, CachePool> cur : tailMap.entrySet()) {
+      if (numListed++ >= maxListCachePoolsResponses) {
+        return new BatchedListEntries<CachePoolInfo>(results, true);
+      }
+      if (pc == null) {
+        results.add(cur.getValue().getInfo(true));
+      } else {
+        results.add(cur.getValue().getInfo(pc));
+      }
     }
-    return results;
+    return new BatchedListEntries<CachePoolInfo>(results, false);
   }
 }

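The listing methods above now return BatchedListEntries, whose boolean flag
records whether the batch was truncated at the configured maximum; callers
resume from the last key they saw. A minimal sketch of that paging pattern
(assuming a CacheManager handle and an FSPermissionChecker, possibly null,
are in scope on the NameNode side):

    static void printAllPools(CacheManager cacheManager,
        FSPermissionChecker pc) {
      String prevKey = "";  // "" sorts before every real pool name
      while (true) {
        BatchedListEntries<CachePoolInfo> batch =
            cacheManager.listCachePools(pc, prevKey);
        for (int i = 0; i < batch.size(); i++) {
          CachePoolInfo info = batch.get(i);
          System.out.println(info.getPoolName());
          prevKey = info.getPoolName();  // resume point for the next call
        }
        if (!batch.hasMore()) {
          break;  // last batch was not truncated; done
        }
      }
    }
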
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java?rev=1520665&r1=1520664&r2=1520665&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java Fri Sep  6 18:52:50 2013
@@ -19,119 +19,135 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.io.IOException;
 
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import javax.annotation.Nonnull;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.CachePoolInfo.Builder;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
  * A CachePool describes a set of cache resources being managed by the NameNode.
  * User caching requests are billed to the cache pool specified in the request.
  *
- * CachePools are uniquely identified by a numeric id as well as the
- * {@link CachePoolInfo} pool name. Mutable metadata is contained in
- * CachePoolInfo, including pool name, owner, group, and permissions.
- * See this class for more details.
+ * This is an internal class, only used on the NameNode.  For identifying or
+ * describing a cache pool to clients, please use CachePoolInfo.
  */
+@InterfaceAudience.Private
 public final class CachePool {
   public static final Log LOG = LogFactory.getLog(CachePool.class);
 
-  private final long id;
-
-  private CachePoolInfo info;
+  @Nonnull
+  private final String poolName;
 
-  public CachePool(long id) {
-    this.id = id;
-    this.info = null;
-  }
+  @Nonnull
+  private String ownerName;
 
-  CachePool(long id, String poolName, String ownerName, String groupName,
+  @Nonnull
+  private String groupName;
+  
+  @Nonnull
+  private FsPermission mode;
+  
+  private int weight;
+  
+  public CachePool(String poolName, String ownerName, String groupName,
       FsPermission mode, Integer weight) throws IOException {
-    this.id = id;
-    // Set CachePoolInfo default fields if null
-    if (poolName == null || poolName.isEmpty()) {
-      throw new IOException("invalid empty cache pool name");
-    }
+    this.poolName = poolName;
     UserGroupInformation ugi = null;
     if (ownerName == null) {
-      ugi = NameNode.getRemoteUser();
-      ownerName = ugi.getShortUserName();
+      ugi = NameNode.getRemoteUser();
+      this.ownerName = ugi.getShortUserName();
+    } else {
+      this.ownerName = ownerName;
     }
     if (groupName == null) {
       if (ugi == null) {
         ugi = NameNode.getRemoteUser();
       }
-      String[] groups = ugi.getGroupNames();
-      if (groups.length == 0) {
-        throw new IOException("failed to get group names from UGI " + ugi);
-      }
-      groupName = groups[0];
-    }
-    if (mode == null) {
-      mode = FsPermission.getDirDefault();
-    }
-    if (weight == null) {
-      weight = 100;
+      this.groupName = ugi.getPrimaryGroupName();
+    } else {
+      this.groupName = groupName;
     }
-    CachePoolInfo.Builder builder = CachePoolInfo.newBuilder();
-    builder.setPoolName(poolName).setOwnerName(ownerName)
-        .setGroupName(groupName).setMode(mode).setWeight(weight);
-    this.info = builder.build();
+    this.mode = mode != null ?
+        new FsPermission(mode) : FsPermission.getCachePoolDefault();
+    this.weight = weight != null ? weight : 100;
   }
 
-  public CachePool(long id, CachePoolInfo info) {
-    this.id = id;
-    this.info = info;
+  public String getName() {
+    return poolName;
   }
 
-  /**
-   * @return id of the pool
-   */
-  public long getId() {
-    return id;
+  public String getOwnerName() {
+    return ownerName;
+  }
+
+  public CachePool setOwnerName(String ownerName) {
+    this.ownerName = ownerName;
+    return this;
   }
 
+  public String getGroupName() {
+    return groupName;
+  }
+
+  public CachePool setGroupName(String groupName) {
+    this.groupName = groupName;
+    return this;
+  }
+
+  public FsPermission getMode() {
+    return mode;
+  }
+
+  public CachePool setMode(FsPermission mode) {
+    this.mode = new FsPermission(mode);
+    return this;
+  }
+  
+  public int getWeight() {
+    return weight;
+  }
+
+  public CachePool setWeight(int weight) {
+    this.weight = weight;
+    return this;
+  }
+  
   /**
    * Get information about this cache pool.
    *
+   * @param fullInfo
+   *          If false, only the name will be returned (i.e., what you
+   *          would get if you didn't have read permission for this pool).
    * @return
    *          Cache pool information.
    */
-  public CachePoolInfo getInfo() {
-    return info;
+  public CachePoolInfo getInfo(boolean fullInfo) {
+    CachePoolInfo info = new CachePoolInfo(poolName);
+    if (!fullInfo) {
+      return info;
+    }
+    return info.setOwnerName(ownerName).
+        setGroupName(groupName).
+        setMode(new FsPermission(mode)).
+        setWeight(weight);
   }
 
-  void setInfo(CachePoolInfo info) {
-    this.info = info;
+  public CachePoolInfo getInfo(FSPermissionChecker pc) {
+    return getInfo(pc.checkPermission(this, FsAction.READ)); 
   }
 
   public String toString() {
     return new StringBuilder().
-        append("{ ").append("id:").append(id).
-        append(", info:").append(info.toString()).
+        append("{ ").append("poolName:").append(poolName).
+        append(", ownerName:").append(ownerName).
+        append(", groupName:").append(groupName).
+        append(", mode:").append(mode).
+        append(", weight:").append(weight).
         append(" }").toString();
   }
-
-  @Override
-  public int hashCode() {
-    return new HashCodeBuilder().append(id).append(info).hashCode();
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (obj == null) { return false; }
-    if (obj == this) { return true; }
-    if (obj.getClass() != getClass()) {
-      return false;
-    }
-    CachePool rhs = (CachePool)obj;
-    return new EqualsBuilder()
-      .append(id, rhs.id)
-      .append(info, rhs.info)
-      .isEquals();
-  }
 }
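
To make the permission-gated getInfo() behavior above concrete, here is a
minimal sketch, assuming it runs inside the NameNode (CachePool is marked
@InterfaceAudience.Private and only used there) and using invented pool
settings:

    static void demoGetInfo() throws IOException {
      // Hypothetical pool: owner "alice", group "staff", mode 0750.
      CachePool pool = new CachePool("pool1", "alice", "staff",
          new FsPermission((short) 0750), 100);

      // fullInfo == true: what a caller with read permission sees.
      CachePoolInfo full = pool.getInfo(true);
      assert "alice".equals(full.getOwnerName());

      // fullInfo == false: what getInfo(pc) returns to a caller without
      // read permission; only the pool name is populated.
      CachePoolInfo nameOnly = pool.getInfo(false);
      assert "pool1".equals(nameOnly.getPoolName());
      assert nameOnly.getOwnerName() == null;
    }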