Posted to hdfs-commits@hadoop.apache.org by wa...@apache.org on 2013/09/21 02:20:37 UTC

svn commit: r1525183 [1/2] - in /hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/protocol/ src/main/java/org/apache/hadoop/hdfs/protocolPB/ src/main/java/or...

Author: wang
Date: Sat Sep 21 00:20:36 2013
New Revision: 1525183

URL: http://svn.apache.org/r1525183
Log:
HDFS-5236. Change PathBasedCacheDirective APIs to be a single value rather than batch. (Contributed by Andrew Wang)
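
For reference, the shape of the change at the ClientProtocol level, paraphrased from the diffs below (the Before/After interface names are illustrative, not part of the commit):

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.util.Fallible;

// Before: batch calls reporting per-element success/failure via Fallible.
interface ClientProtocolBefore {
  List<Fallible<PathBasedCacheDescriptor>> addPathBasedCacheDirectives(
      List<PathBasedCacheDirective> directives) throws IOException;
  List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids)
      throws IOException;
}

// After: one value per call; failures propagate as ordinary IOExceptions.
interface ClientProtocolAfter {
  PathBasedCacheDescriptor addPathBasedCacheDirective(
      PathBasedCacheDirective directive) throws IOException;
  void removePathBasedCacheDescriptor(Long id) throws IOException;
}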

Modified:
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheDescriptorException.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java
    hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt Sat Sep 21 00:20:36 2013
@@ -42,6 +42,9 @@ HDFS-4949 (Unreleased)
     HDFS-5213. Separate PathBasedCacheEntry and PathBasedCacheDirectiveWithId.
     (Contributed by Colin Patrick McCabe)
 
+    HDFS-5236. Change PathBasedCacheDirective APIs to be a single value
+    rather than batch. (Contributed by Andrew Wang)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java Sat Sep 21 00:20:36 2013
@@ -98,6 +98,7 @@ import org.apache.hadoop.fs.MD5MD5CRC32C
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.MD5MD5CRC32GzipFileChecksum;
 import org.apache.hadoop.fs.Options;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
@@ -107,6 +108,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.hdfs.client.ClientMmapManager;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
+import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -115,6 +117,8 @@ import org.apache.hadoop.hdfs.protocol.D
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsBlocksMetadata;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -2279,7 +2283,73 @@ public class DFSClient implements java.i
       throw re.unwrapRemoteException();
     }
   }
+
+  public PathBasedCacheDescriptor addPathBasedCacheDirective(
+      PathBasedCacheDirective directive) throws IOException {
+    checkOpen();
+    try {
+      return namenode.addPathBasedCacheDirective(directive);
+    } catch (RemoteException re) {
+      throw re.unwrapRemoteException();
+    }
+  }
   
+  public void removePathBasedCacheDescriptor(PathBasedCacheDescriptor descriptor)
+      throws IOException {
+    checkOpen();
+    try {
+      namenode.removePathBasedCacheDescriptor(descriptor.getEntryId());
+    } catch (RemoteException re) {
+      throw re.unwrapRemoteException();
+    }
+  }
+  
+  public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(
+      String pool, String path) throws IOException {
+    checkOpen();
+    try {
+      return namenode.listPathBasedCacheDescriptors(0, pool, path);
+    } catch (RemoteException re) {
+      throw re.unwrapRemoteException();
+    }
+  }
+
+  public void addCachePool(CachePoolInfo info) throws IOException {
+    checkOpen();
+    try {
+      namenode.addCachePool(info);
+    } catch (RemoteException re) {
+      throw re.unwrapRemoteException();
+    }
+  }
+
+  public void modifyCachePool(CachePoolInfo info) throws IOException {
+    checkOpen();
+    try {
+      namenode.modifyCachePool(info);
+    } catch (RemoteException re) {
+      throw re.unwrapRemoteException();
+    }
+  }
+
+  public void removeCachePool(String poolName) throws IOException {
+    checkOpen();
+    try {
+      namenode.removeCachePool(poolName);
+    } catch (RemoteException re) {
+      throw re.unwrapRemoteException();
+    }
+  }
+
+  public RemoteIterator<CachePoolInfo> listCachePools() throws IOException {
+    checkOpen();
+    try {
+      return namenode.listCachePools("");
+    } catch (RemoteException re) {
+      throw re.unwrapRemoteException();
+    }
+  }
+
   /**
    * Save namespace image.
    * 

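Each new DFSClient method above follows the same template: checkOpen() guards against a closed client, the call is delegated to the ClientProtocol proxy, and RemoteException#unwrapRemoteException restores the server's original exception type. A minimal caller-side sketch of what that buys (the helper and its arguments are illustrative; InvalidPoolNameError is modified later in this commit):

import java.io.IOException;

import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;

class AddDirectiveCaller {
  // Returns null if the pool does not exist, instead of unwrapping a Fallible.
  static PathBasedCacheDescriptor addOrNull(DFSClient client, String path,
      String pool) throws IOException {
    try {
      return client.addPathBasedCacheDirective(
          new PathBasedCacheDirective(path, pool));
    } catch (InvalidPoolNameError e) {
      // unwrapRemoteException() re-creates the specific subtype client-side,
      // so callers can branch on the exception class directly.
      return null;
    }
  }
}
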
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java Sat Sep 21 00:20:36 2013
@@ -79,7 +79,6 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.Fallible;
 import org.apache.hadoop.util.Progressable;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -1584,29 +1583,26 @@ public class DistributedFileSystem exten
   }
 
   /**
-   * Add some PathBasedCache directives.
+   * Add a new PathBasedCacheDirective.
    * 
-   * @param directives A list of PathBasedCache directives to be added.
-   * @return A Fallible list, where each element is either a successfully addded
-   *         PathBasedCache entry, or an IOException describing why the directive
-   *         could not be added.
+   * @param directive A PathBasedCacheDirective to add
+   * @return PathBasedCacheDescriptor associated with the added directive
+   * @throws IOException if the directive could not be added
    */
-  public List<Fallible<PathBasedCacheDescriptor>>
-      addPathBasedCacheDirective(List<PathBasedCacheDirective> directives)
-          throws IOException {
-    return dfs.namenode.addPathBasedCacheDirectives(directives);
+  public PathBasedCacheDescriptor addPathBasedCacheDirective(
+      PathBasedCacheDirective directive) throws IOException {
+    return dfs.addPathBasedCacheDirective(directive);
   }
   
   /**
-   * Remove some PathBasedCache entries.
+   * Remove a PathBasedCacheDescriptor.
    * 
-   * @param ids A list of all the entry IDs to be removed.
-   * @return A Fallible list where each element is either a successfully removed
-   *         ID, or an IOException describing why the ID could not be removed.
-   */
-  public List<Fallible<Long>>
-      removePathBasedCacheDescriptors(List<Long> ids) throws IOException {
-    return dfs.namenode.removePathBasedCacheDescriptors(ids);
+   * @param descriptor PathBasedCacheDescriptor to remove
+   * @throws IOException if the descriptor could not be removed
+   */
+  public void removePathBasedCacheDescriptor(PathBasedCacheDescriptor descriptor)
+      throws IOException {
+    dfs.removePathBasedCacheDescriptor(descriptor);
   }
   
   /**
@@ -1619,43 +1615,46 @@ public class DistributedFileSystem exten
    */
   public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(
       String pool, String path) throws IOException {
-    return dfs.namenode.listPathBasedCacheDescriptors(0, pool, path);
+    return dfs.listPathBasedCacheDescriptors(pool, path);
   }
 
   /**
    * Add a cache pool.
    *
-   * @param req
+   * @param info
    *          The request to add a cache pool.
    * @throws IOException 
    *          If the request could not be completed.
    */
   public void addCachePool(CachePoolInfo info) throws IOException {
-    dfs.namenode.addCachePool(info);
+    CachePoolInfo.validate(info);
+    dfs.addCachePool(info);
   }
 
   /**
    * Modify an existing cache pool.
    *
-   * @param req
+   * @param info
    *          The request to modify a cache pool.
    * @throws IOException 
    *          If the request could not be completed.
    */
   public void modifyCachePool(CachePoolInfo info) throws IOException {
-    dfs.namenode.modifyCachePool(info);
+    CachePoolInfo.validate(info);
+    dfs.modifyCachePool(info);
   }
     
   /**
    * Remove a cache pool.
    *
-   * @param cachePoolName
+   * @param poolName
    *          Name of the cache pool to remove.
    * @throws IOException 
    *          if the cache pool did not exist, or could not be removed.
    */
-  public void removeCachePool(String name) throws IOException {
-    dfs.namenode.removeCachePool(name);
+  public void removeCachePool(String poolName) throws IOException {
+    CachePoolInfo.validateName(poolName);
+    dfs.removeCachePool(poolName);
   }
 
   /**
@@ -1667,6 +1666,6 @@ public class DistributedFileSystem exten
    *          If there was an error listing cache pools.
    */
   public RemoteIterator<CachePoolInfo> listCachePools() throws IOException {
-    return dfs.namenode.listCachePools("");
+    return dfs.listCachePools();
   }
 }

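Taken together, the reworked DistributedFileSystem surface supports a straightforward create/add/list/remove flow. A sketch, assuming the single-argument CachePoolInfo(String) constructor and that a null path argument acts as a wildcard filter (neither is shown in this diff):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;

public class CachePoolWalkthrough {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS in the Configuration points at an HDFS cluster.
    Configuration conf = new Configuration();
    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);

    dfs.addCachePool(new CachePoolInfo("pool1"));
    PathBasedCacheDescriptor d = dfs.addPathBasedCacheDirective(
        new PathBasedCacheDirective("/cached/file", "pool1"));

    RemoteIterator<PathBasedCacheDescriptor> it =
        dfs.listPathBasedCacheDescriptors("pool1", null);
    while (it.hasNext()) {
      System.out.println(it.next());
    }

    dfs.removePathBasedCacheDescriptor(d);
    dfs.removeCachePool("pool1");
  }
}
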
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java Sat Sep 21 00:20:36 2013
@@ -25,24 +25,20 @@ import java.io.IOException;
 public abstract class AddPathBasedCacheDirectiveException extends IOException {
   private static final long serialVersionUID = 1L;
 
-  private final PathBasedCacheDirective directive;
-  
-  public AddPathBasedCacheDirectiveException(String description,
-      PathBasedCacheDirective directive) {
+  public AddPathBasedCacheDirectiveException(String description) {
     super(description);
-    this.directive = directive;
-  }
-
-  public PathBasedCacheDirective getDirective() {
-    return directive;
   }
 
   public static final class EmptyPathError
       extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
+    public EmptyPathError(String msg) {
+      super(msg);
+    }
+
     public EmptyPathError(PathBasedCacheDirective directive) {
-      super("empty path in directive " + directive, directive);
+      this("empty path in directive " + directive);
     }
   }
 
@@ -50,9 +46,12 @@ public abstract class AddPathBasedCacheD
       extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
+    public InvalidPathNameError(String msg) {
+      super(msg);
+    }
+
     public InvalidPathNameError(PathBasedCacheDirective directive) {
-      super("can't handle non-absolute path name " + directive.getPath(),
-          directive);
+      this("can't handle invalid path name " + directive.getPath());
     }
   }
 
@@ -60,8 +59,12 @@ public abstract class AddPathBasedCacheD
       extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
+    public InvalidPoolNameError(String msg) {
+      super(msg);
+    }
+
     public InvalidPoolNameError(PathBasedCacheDirective directive) {
-      super("invalid pool name '" + directive.getPool() + "'", directive);
+      this("invalid pool name '" + directive.getPool() + "'");
     }
   }
 
@@ -69,9 +72,12 @@ public abstract class AddPathBasedCacheD
       extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
+    public PoolWritePermissionDeniedError(String msg) {
+      super(msg);
+    }
+
     public PoolWritePermissionDeniedError(PathBasedCacheDirective directive) {
-      super("write permission denied for pool '" + directive.getPool() + "'",
-            directive);
+      this("write permission denied for pool '" + directive.getPool() + "'");
     }
   }
 
@@ -79,9 +85,13 @@ public abstract class AddPathBasedCacheD
       extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
+    public PathAlreadyExistsInPoolError(String msg) {
+      super(msg);
+    }
+
     public PathAlreadyExistsInPoolError(PathBasedCacheDirective directive) {
-      super("path " + directive.getPath() + " already exists in pool " +
-          directive.getPool(), directive);
+      this("path " + directive.getPath() + " already exists in pool " +
+          directive.getPool());
     }
   }
 
@@ -89,10 +99,14 @@ public abstract class AddPathBasedCacheD
       extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
+    public UnexpectedAddPathBasedCacheDirectiveException(String msg) {
+      super(msg);
+    }
+
     public UnexpectedAddPathBasedCacheDirectiveException(
         PathBasedCacheDirective directive) {
-      super("encountered an unexpected error when trying to " +
-          "add PathBasedCache directive " + directive, directive);
+      this("encountered an unexpected error when trying to " +
+          "add PathBasedCache directive " + directive);
     }
   }
-};
+}

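The new single-String constructors are what let these exceptions survive the RPC boundary: Hadoop's RemoteException rebuilds the original exception reflectively from its class name and message, which requires a public (String) constructor. Roughly (a simplified sketch of the unwrap logic, not the actual implementation):

import java.io.IOException;
import java.lang.reflect.Constructor;

class UnwrapSketch {
  static IOException rebuild(String className, String message)
      throws Exception {
    Class<?> cls = Class.forName(className);
    // Fails without a public (String) constructor -- hence the ones added above.
    Constructor<?> ctor = cls.getConstructor(String.class);
    return (IOException) ctor.newInstance(message);
  }
}
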
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CachePoolInfo.java Sat Sep 21 00:20:36 2013
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hdfs.protocol;
 
+import java.io.IOException;
+
 import javax.annotation.Nullable;
 
 import org.apache.commons.lang.builder.EqualsBuilder;
@@ -127,4 +129,20 @@ public class CachePoolInfo {
         append(weight).
         hashCode();
   }
+
+  public static void validate(CachePoolInfo info) throws IOException {
+    if (info == null) {
+      throw new IOException("CachePoolInfo is null");
+    }
+    validateName(info.poolName);
+  }
+
+  public static void validateName(String poolName) throws IOException {
+    if (poolName == null || poolName.isEmpty()) {
+      // Empty pool names are not allowed because they would be highly
+      // confusing.  They would also break the ability to list all pools
+      // by starting with prevKey = ""
+      throw new IOException("invalid empty cache pool name");
+    }
+  }
 }

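The validate helpers are now called from DistributedFileSystem (above) and CacheManager (below) so that bad pool names are rejected before any RPC or namespace change. A quick sketch of the behavior, again assuming the CachePoolInfo(String) constructor:

import java.io.IOException;

import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

class ValidateExample {
  public static void main(String[] args) {
    try {
      CachePoolInfo.validate(new CachePoolInfo(""));
    } catch (IOException e) {
      System.out.println(e.getMessage()); // "invalid empty cache pool name"
    }
  }
}
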
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java Sat Sep 21 00:20:36 2013
@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.protocol;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -48,7 +47,6 @@ import org.apache.hadoop.security.Access
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
-import org.apache.hadoop.util.Fallible;
 
 /**********************************************************************
  * ClientProtocol is used by user code via 
@@ -1098,28 +1096,24 @@ public interface ClientProtocol {
       String fromSnapshot, String toSnapshot) throws IOException;
 
   /**
-   * Add some PathBasedCache directives to the CacheManager.
+   * Add a PathBasedCache entry to the CacheManager.
    * 
-   * @param directives A list of PathBasedCache directives to be added.
-   * @return A Fallible list, where each element is either a successfully addded
-   *         PathBasedCache entry, or an IOException describing why the directive
-   *         could not be added.
+   * @param directive A PathBasedCacheDirective to be added
+   * @return A PathBasedCacheDescriptor associated with the added directive
+   * @throws IOException if the directive could not be added
    */
   @AtMostOnce
-  public List<Fallible<PathBasedCacheDescriptor>>
-    addPathBasedCacheDirectives(List<PathBasedCacheDirective> directives)
-      throws IOException;
+  public PathBasedCacheDescriptor addPathBasedCacheDirective(
+      PathBasedCacheDirective directive) throws IOException;
 
   /**
-   * Remove some PathBasedCache entries from the CacheManager.
+   * Remove a PathBasedCacheDescriptor from the CacheManager.
    * 
-   * @param ids A list of all the entry IDs to be removed from the CacheManager.
-   * @return A Fallible list where each element is either a successfully removed
-   *         ID, or an IOException describing why the ID could not be removed.
+   * @param id of a PathBasedCacheDescriptor
+   * @throws IOException if the cache descriptor could not be removed
    */
   @AtMostOnce
-  public List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids)
-      throws IOException;
+  public void removePathBasedCacheDescriptor(Long id) throws IOException;
 
   /**
    * List the set of cached paths of a cache pool. Incrementally fetches results
@@ -1132,9 +1126,9 @@ public interface ClientProtocol {
    * @return A RemoteIterator which returns PathBasedCacheDescriptor objects.
    */
   @Idempotent
-  public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(long prevId,
-      String pool, String path) throws IOException;
-  
+  public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(
+      long prevId, String pool, String path) throws IOException;
+
   /**
    * Add a new cache pool.
    * 

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheDescriptorException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheDescriptorException.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheDescriptorException.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheDescriptorException.java Sat Sep 21 00:20:36 2013
@@ -19,31 +19,26 @@ package org.apache.hadoop.hdfs.protocol;
 
 import java.io.IOException;
 
-import com.google.common.base.Preconditions;
-
 /**
  * An exception which occurred when trying to remove a PathBasedCache entry.
  */
 public abstract class RemovePathBasedCacheDescriptorException extends IOException {
   private static final long serialVersionUID = 1L;
 
-  private final long entryId;
-
-  public RemovePathBasedCacheDescriptorException(String description, long entryId) {
+  public RemovePathBasedCacheDescriptorException(String description) {
     super(description);
-    this.entryId = entryId;
-  }
-    
-  public long getEntryId() {
-    return this.entryId;
   }
 
   public final static class InvalidIdException
       extends RemovePathBasedCacheDescriptorException {
     private static final long serialVersionUID = 1L;
 
+    public InvalidIdException(String msg) {
+      super(msg);
+    }
+
     public InvalidIdException(long entryId) {
-      super("invalid PathBasedCacheDescriptor id " + entryId, entryId);
+      this("invalid PathBasedCacheDescriptor id " + entryId);
     }
   }
 
@@ -51,9 +46,13 @@ public abstract class RemovePathBasedCac
       extends RemovePathBasedCacheDescriptorException {
     private static final long serialVersionUID = 1L;
 
+    public RemovePermissionDeniedException(String msg) {
+      super(msg);
+    }
+
     public RemovePermissionDeniedException(long entryId) {
-      super("permission denied when trying to remove " + 
-          "PathBasedCacheDescriptor id " + entryId, entryId);
+      this("permission denied when trying to remove " + 
+          "PathBasedCacheDescriptor id " + entryId);
     }
   }
 
@@ -61,9 +60,12 @@ public abstract class RemovePathBasedCac
       extends RemovePathBasedCacheDescriptorException {
     private static final long serialVersionUID = 1L;
 
+    public NoSuchIdException(String msg) {
+      super(msg);
+    }
+
     public NoSuchIdException(long entryId) {
-      super("there is no PathBasedCacheDescriptor with id " + entryId,
-          entryId);
+      this("there is no PathBasedCacheDescriptor with id " + entryId);
     }
   }
 
@@ -71,9 +73,13 @@ public abstract class RemovePathBasedCac
       extends RemovePathBasedCacheDescriptorException {
     private static final long serialVersionUID = 1L;
 
+    public UnexpectedRemovePathBasedCacheDescriptorException(String msg) {
+      super(msg);
+    }
+
     public UnexpectedRemovePathBasedCacheDescriptorException(long id) {
-      super("encountered an unexpected error when trying to " +
-          "remove PathBasedCacheDescriptor with id " + id, id);
+      this("encountered an unexpected error when trying to " +
+          "remove PathBasedCacheDescriptor with id " + id);
     }
   }
 }

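As on the add side, removal failures are now ordinary exceptions that callers can catch by subtype. A sketch of idempotent cleanup (the helper is illustrative):

import java.io.IOException;

import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;

class RemoveDescriptorCaller {
  static void removeIfPresent(DFSClient client, PathBasedCacheDescriptor d)
      throws IOException {
    try {
      client.removePathBasedCacheDescriptor(d);
    } catch (NoSuchIdException e) {
      // Already removed; treat as success for idempotent cleanup.
    }
  }
}
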
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java Sat Sep 21 00:20:36 2013
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.protocolPB;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -28,11 +27,6 @@ import org.apache.hadoop.fs.FsServerDefa
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PathAlreadyExistsInPoolError;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
@@ -40,8 +34,8 @@ import org.apache.hadoop.hdfs.protocol.D
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.InvalidIdException;
 import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
 import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.RemovePermissionDeniedException;
@@ -53,9 +47,8 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveErrorProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
@@ -83,8 +76,8 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsResponseProto.Builder;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsResponseProto.Builder;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetContentSummaryRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetContentSummaryResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDataEncryptionKeyRequestProto;
@@ -132,9 +125,8 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2ResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
@@ -174,7 +166,6 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
@@ -184,7 +175,6 @@ import org.apache.hadoop.security.proto.
 import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
 import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.Fallible;
 
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
@@ -1040,44 +1030,18 @@ public class ClientNamenodeProtocolServe
   }
 
   @Override
-  public AddPathBasedCacheDirectivesResponseProto addPathBasedCacheDirectives(RpcController controller,
-      AddPathBasedCacheDirectivesRequestProto request) throws ServiceException {
+  public AddPathBasedCacheDirectiveResponseProto addPathBasedCacheDirective(
+      RpcController controller, AddPathBasedCacheDirectiveRequestProto request)
+      throws ServiceException {
     try {
-      ArrayList<PathBasedCacheDirective> input =
-          new ArrayList<PathBasedCacheDirective>(request.getElementsCount());
-      for (int i = 0; i < request.getElementsCount(); i++) {
-        PathBasedCacheDirectiveProto proto = request.getElements(i);
-        input.add(new PathBasedCacheDirective(proto.getPath(), proto.getPool()));
-      }
-      List<Fallible<PathBasedCacheDescriptor>> output = server.addPathBasedCacheDirectives(input);
-      AddPathBasedCacheDirectivesResponseProto.Builder builder =
-         AddPathBasedCacheDirectivesResponseProto.newBuilder();
-      for (int idx = 0; idx < output.size(); idx++) {
-        try {
-          PathBasedCacheDescriptor directive = output.get(idx).get();
-          builder.addResults(directive.getEntryId());
-        } catch (IOException ioe) {
-          if (ioe.getCause() instanceof EmptyPathError) {
-            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
-                EMPTY_PATH_ERROR_VALUE);
-          } else if (ioe.getCause() instanceof InvalidPathNameError) {
-            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
-                INVALID_PATH_NAME_ERROR_VALUE);
-          } else if (ioe.getCause() instanceof InvalidPoolNameError) {
-            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
-                INVALID_POOL_NAME_ERROR_VALUE);
-          } else if (ioe.getCause() instanceof PoolWritePermissionDeniedError) {
-            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
-                ADD_PERMISSION_DENIED_ERROR_VALUE);
-          } else if (ioe.getCause() instanceof PathAlreadyExistsInPoolError) {
-            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
-                PATH_ALREADY_EXISTS_IN_POOL_ERROR_VALUE);
-          } else {
-            builder.addResults(AddPathBasedCacheDirectiveErrorProto.
-                UNEXPECTED_ADD_ERROR_VALUE);
-          }
-        }
-      }
+      PathBasedCacheDirectiveProto proto = request.getDirective();
+      PathBasedCacheDirective directive =
+          new PathBasedCacheDirective(proto.getPath(), proto.getPool());
+      PathBasedCacheDescriptor descriptor =
+          server.addPathBasedCacheDirective(directive);
+      AddPathBasedCacheDirectiveResponseProto.Builder builder =
+         AddPathBasedCacheDirectiveResponseProto.newBuilder();
+      builder.setDescriptorId(descriptor.getEntryId());
       return builder.build();
     } catch (IOException e) {
       throw new ServiceException(e);
@@ -1085,32 +1049,14 @@ public class ClientNamenodeProtocolServe
   }
 
   @Override
-  public RemovePathBasedCacheDescriptorsResponseProto removePathBasedCacheDescriptors(
-      RpcController controller, RemovePathBasedCacheDescriptorsRequestProto request)
+  public RemovePathBasedCacheDescriptorResponseProto removePathBasedCacheDescriptor(
+      RpcController controller,
+      RemovePathBasedCacheDescriptorRequestProto request)
       throws ServiceException {
     try {
-      List<Fallible<Long>> output =
-         server.removePathBasedCacheDescriptors(request.getElementsList());
-      RemovePathBasedCacheDescriptorsResponseProto.Builder builder =
-         RemovePathBasedCacheDescriptorsResponseProto.newBuilder();
-      for (int idx = 0; idx < output.size(); idx++) {
-        try {
-          long id = output.get(idx).get();
-          builder.addResults(id);
-        } catch (InvalidIdException ioe) {
-          builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
-              INVALID_CACHED_PATH_ID_ERROR_VALUE);
-        } catch (NoSuchIdException ioe) {
-          builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
-              NO_SUCH_CACHED_PATH_ID_ERROR_VALUE);
-        } catch (RemovePermissionDeniedException ioe) {
-          builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
-              REMOVE_PERMISSION_DENIED_ERROR_VALUE);
-        } catch (IOException ioe) {
-          builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
-              UNEXPECTED_REMOVE_ERROR_VALUE);
-        }
-      }
+      server.removePathBasedCacheDescriptor(request.getDescriptorId());
+      RemovePathBasedCacheDescriptorResponseProto.Builder builder =
+         RemovePathBasedCacheDescriptorResponseProto.newBuilder();
       return builder.build();
     } catch (IOException e) {
       throw new ServiceException(e);

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java Sat Sep 21 00:20:36 2013
@@ -20,9 +20,7 @@ package org.apache.hadoop.hdfs.protocolP
 import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -38,18 +36,7 @@ import org.apache.hadoop.fs.RemoteIterat
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PathAlreadyExistsInPoolError;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.UnexpectedAddPathBasedCacheDirectiveException;
-import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.RemovePermissionDeniedException;
-import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.UnexpectedRemovePathBasedCacheDescriptorException;
+import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -62,17 +49,16 @@ import org.apache.hadoop.hdfs.protocol.H
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathBasedCacheDirectiveProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveErrorProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendResponseProto;
@@ -110,23 +96,21 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsElementProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseElementProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsElementProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathBasedCacheDirectiveProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorErrorProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameSnapshotRequestProto;
@@ -147,7 +131,6 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineRequestProto;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.namenode.CacheManager;
 import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
 import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
 import org.apache.hadoop.io.EnumSetWritable;
@@ -163,7 +146,6 @@ import org.apache.hadoop.security.proto.
 import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto;
 import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.Fallible;
 
 import com.google.protobuf.ByteString;
 import com.google.protobuf.ServiceException;
@@ -1020,101 +1002,33 @@ public class ClientNamenodeProtocolTrans
     }
   }
 
-  private static IOException addPathBasedCacheDirectivesError(long code,
-      PathBasedCacheDirective directive) {
-    if (code == AddPathBasedCacheDirectiveErrorProto.EMPTY_PATH_ERROR_VALUE) {
-      return new EmptyPathError(directive);
-    } else if (code == AddPathBasedCacheDirectiveErrorProto.
-        INVALID_PATH_NAME_ERROR_VALUE) {
-      return new InvalidPathNameError(directive);
-    } else if (code == AddPathBasedCacheDirectiveErrorProto.
-        INVALID_POOL_NAME_ERROR_VALUE) {
-      return new InvalidPoolNameError(directive);
-    } else if (code == AddPathBasedCacheDirectiveErrorProto.
-        ADD_PERMISSION_DENIED_ERROR_VALUE) {
-      return new PoolWritePermissionDeniedError(directive);
-    } else if (code == AddPathBasedCacheDirectiveErrorProto.
-        PATH_ALREADY_EXISTS_IN_POOL_ERROR_VALUE) {
-      return new PathAlreadyExistsInPoolError(directive);
-    } else {
-      return new UnexpectedAddPathBasedCacheDirectiveException(directive);
-    }
-  }
-  
   @Override
-  public List<Fallible<PathBasedCacheDescriptor>> addPathBasedCacheDirectives(
-        List<PathBasedCacheDirective> directives) throws IOException {
+  public PathBasedCacheDescriptor addPathBasedCacheDirective(
+      PathBasedCacheDirective directive) throws IOException {
     try {
-      AddPathBasedCacheDirectivesRequestProto.Builder builder =
-          AddPathBasedCacheDirectivesRequestProto.newBuilder();
-      for (PathBasedCacheDirective directive : directives) {
-        builder.addElements(PathBasedCacheDirectiveProto.newBuilder().
-            setPath(directive.getPath()).
-            setPool(directive.getPool()).
-            build());
-      }
-      AddPathBasedCacheDirectivesResponseProto result = 
-          rpcProxy.addPathBasedCacheDirectives(null, builder.build());
-      int resultsCount = result.getResultsCount();
-      ArrayList<Fallible<PathBasedCacheDescriptor>> results = 
-          new ArrayList<Fallible<PathBasedCacheDescriptor>>(resultsCount);
-      for (int i = 0; i < resultsCount; i++) {
-        PathBasedCacheDirective directive = directives.get(i);
-        long code = result.getResults(i);
-        if (code > 0) {
-          results.add(new Fallible<PathBasedCacheDescriptor>(
-                new PathBasedCacheDescriptor(code,
-                  directive.getPath(), directive.getPool())));
-        } else {
-          results.add(new Fallible<PathBasedCacheDescriptor>(
-                addPathBasedCacheDirectivesError(code, directive)));
-        }
-      }
-      return results;
+      AddPathBasedCacheDirectiveRequestProto.Builder builder =
+          AddPathBasedCacheDirectiveRequestProto.newBuilder();
+      builder.setDirective(PathBasedCacheDirectiveProto.newBuilder()
+          .setPath(directive.getPath())
+          .setPool(directive.getPool())
+          .build());
+      AddPathBasedCacheDirectiveResponseProto result = 
+          rpcProxy.addPathBasedCacheDirective(null, builder.build());
+      return new PathBasedCacheDescriptor(result.getDescriptorId(),
+          directive.getPath(), directive.getPool());
     } catch (ServiceException e) {
       throw ProtobufHelper.getRemoteException(e);
     }
   }
-  
-  private static IOException removePathBasedCacheDescriptorsError(long code, long id) {
-    if (code == RemovePathBasedCacheDescriptorErrorProto.
-        INVALID_CACHED_PATH_ID_ERROR_VALUE) {
-      return new InvalidIdException(id);
-    } else if (code == RemovePathBasedCacheDescriptorErrorProto.
-        NO_SUCH_CACHED_PATH_ID_ERROR_VALUE) {
-      return new NoSuchIdException(id);
-    } else if (code == RemovePathBasedCacheDescriptorErrorProto.
-        REMOVE_PERMISSION_DENIED_ERROR_VALUE) {
-      return new RemovePermissionDeniedException(id);
-    } else {
-      return new UnexpectedRemovePathBasedCacheDescriptorException(id);
-    }
-  }
 
   @Override
-  public List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids)
+  public void removePathBasedCacheDescriptor(Long id)
       throws IOException {
     try {
-      RemovePathBasedCacheDescriptorsRequestProto.Builder builder =
-          RemovePathBasedCacheDescriptorsRequestProto.newBuilder();
-      for (Long id : ids) {
-        builder.addElements(id);
-      }
-      RemovePathBasedCacheDescriptorsResponseProto result = 
-          rpcProxy.removePathBasedCacheDescriptors(null, builder.build());
-      int resultsCount = result.getResultsCount();
-      ArrayList<Fallible<Long>> results = 
-          new ArrayList<Fallible<Long>>(resultsCount);
-      for (int i = 0; i < resultsCount; i++) {
-        long code = result.getResults(i);
-        if (code > 0) {
-          results.add(new Fallible<Long>(code));
-        } else {
-          results.add(new Fallible<Long>(
-              removePathBasedCacheDescriptorsError(code, ids.get(i))));
-        }
-      }
-      return results;
+      RemovePathBasedCacheDescriptorRequestProto.Builder builder =
+          RemovePathBasedCacheDescriptorRequestProto.newBuilder();
+      builder.setDescriptorId(id);
+      rpcProxy.removePathBasedCacheDescriptor(null, builder.build());
     } catch (ServiceException e) {
       throw ProtobufHelper.getRemoteException(e);
     }

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java Sat Sep 21 00:20:36 2013
@@ -47,7 +47,6 @@ import org.apache.hadoop.hdfs.protocol.R
 import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
 import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.UnexpectedRemovePathBasedCacheDescriptorException;
 import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.RemovePermissionDeniedException;
-import org.apache.hadoop.util.Fallible;
 
 /**
  * The Cache Manager handles caching on DataNodes.
@@ -136,25 +135,24 @@ public final class CacheManager {
     return null;
   }
 
-  private synchronized Fallible<PathBasedCacheDescriptor> addDirective(
-        PathBasedCacheDirective directive, FSPermissionChecker pc) {
+  public synchronized PathBasedCacheDescriptor addDirective(
+      PathBasedCacheDirective directive, FSPermissionChecker pc)
+      throws IOException {
     CachePool pool = cachePools.get(directive.getPool());
     if (pool == null) {
       LOG.info("addDirective " + directive + ": pool not found.");
-      return new Fallible<PathBasedCacheDescriptor>(
-          new InvalidPoolNameError(directive));
+      throw new InvalidPoolNameError(directive);
     }
     if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
       LOG.info("addDirective " + directive + ": write permission denied.");
-      return new Fallible<PathBasedCacheDescriptor>(
-          new PoolWritePermissionDeniedError(directive));
+      throw new PoolWritePermissionDeniedError(directive);
     }
     try {
       directive.validate();
     } catch (IOException ioe) {
       LOG.info("addDirective " + directive + ": validation failed: "
           + ioe.getClass().getName() + ": " + ioe.getMessage());
-      return new Fallible<PathBasedCacheDescriptor>(ioe);
+      throw ioe;
     }
     
     // Check if we already have this entry.
@@ -162,8 +160,7 @@ public final class CacheManager {
     if (existing != null) {
       LOG.info("addDirective " + directive + ": there is an " +
           "existing directive " + existing + " in this pool.");
-      return new Fallible<PathBasedCacheDescriptor>(
-          existing.getDescriptor());
+      return existing.getDescriptor();
     }
     // Add a new entry with the next available ID.
     PathBasedCacheEntry entry;
@@ -171,8 +168,7 @@ public final class CacheManager {
       entry = new PathBasedCacheEntry(getNextEntryId(),
           directive.getPath(), pool);
     } catch (IOException ioe) {
-      return new Fallible<PathBasedCacheDescriptor>(
-          new UnexpectedAddPathBasedCacheDirectiveException(directive));
+      throw new UnexpectedAddPathBasedCacheDirectiveException(directive);
     }
     LOG.info("addDirective " + directive + ": added cache directive "
         + directive);
@@ -202,56 +198,42 @@ public final class CacheManager {
     } catch (IOException ioe) {
       LOG.info("addDirective " + directive +": failed to cache file: " +
           ioe.getClass().getName() +": " + ioe.getMessage());
-      return new Fallible<PathBasedCacheDescriptor>(ioe);
+      throw ioe;
     }
-    return new Fallible<PathBasedCacheDescriptor>(
-        entry.getDescriptor());
+    return entry.getDescriptor();
   }
 
-  public synchronized List<Fallible<PathBasedCacheDescriptor>> addDirectives(
-      List<PathBasedCacheDirective> directives, FSPermissionChecker pc) {
-    ArrayList<Fallible<PathBasedCacheDescriptor>> results = 
-        new ArrayList<Fallible<PathBasedCacheDescriptor>>(directives.size());
-    for (PathBasedCacheDirective directive: directives) {
-      results.add(addDirective(directive, pc));
-    }
-    return results;
-  }
-
-  private synchronized Fallible<Long> removeDescriptor(long id,
-        FSPermissionChecker pc) {
+  public synchronized void removeDescriptor(long id, FSPermissionChecker pc)
+      throws IOException {
     // Check for invalid IDs.
     if (id <= 0) {
       LOG.info("removeDescriptor " + id + ": invalid non-positive " +
           "descriptor ID.");
-      return new Fallible<Long>(new InvalidIdException(id));
+      throw new InvalidIdException(id);
     }
     // Find the entry.
     PathBasedCacheEntry existing = entriesById.get(id);
     if (existing == null) {
       LOG.info("removeDescriptor " + id + ": entry not found.");
-      return new Fallible<Long>(new NoSuchIdException(id));
+      throw new NoSuchIdException(id);
     }
     CachePool pool = cachePools.get(existing.getDescriptor().getPool());
     if (pool == null) {
       LOG.info("removeDescriptor " + id + ": pool not found for directive " +
         existing.getDescriptor());
-      return new Fallible<Long>(
-          new UnexpectedRemovePathBasedCacheDescriptorException(id));
+      throw new UnexpectedRemovePathBasedCacheDescriptorException(id);
     }
     if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
       LOG.info("removeDescriptor " + id + ": write permission denied to " +
           "pool " + pool + " for entry " + existing);
-      return new Fallible<Long>(
-          new RemovePermissionDeniedException(id));
+      throw new RemovePermissionDeniedException(id);
     }
     
     // Remove the corresponding entry in entriesByPath.
     String path = existing.getDescriptor().getPath();
     List<PathBasedCacheEntry> entries = entriesByPath.get(path);
     if (entries == null || !entries.remove(existing)) {
-      return new Fallible<Long>(
-          new UnexpectedRemovePathBasedCacheDescriptorException(id));
+      throw new UnexpectedRemovePathBasedCacheDescriptorException(id);
     }
     if (entries.size() == 0) {
       entriesByPath.remove(path);
@@ -268,20 +250,9 @@ public final class CacheManager {
     } catch (IOException e) {
       LOG.warn("removeDescriptor " + id + ": failure while setting cache"
           + " replication factor", e);
-      return new Fallible<Long>(e);
+      throw e;
     }
     LOG.info("removeDescriptor successful for PathCacheEntry id " + id);
-    return new Fallible<Long>(id);
-  }
-
-  public synchronized List<Fallible<Long>> removeDescriptors(List<Long> ids,
-      FSPermissionChecker pc) {
-    ArrayList<Fallible<Long>> results = 
-        new ArrayList<Fallible<Long>>(ids.size());
-    for (Long id : ids) {
-      results.add(removeDescriptor(id, pc));
-    }
-    return results;
   }
 
   public synchronized BatchedListEntries<PathBasedCacheDescriptor> 
@@ -329,8 +300,8 @@ public final class CacheManager {
    */
   public synchronized void addCachePool(CachePoolInfo info)
       throws IOException {
+    CachePoolInfo.validate(info);
     String poolName = info.getPoolName();
-    CachePool.validateName(poolName);
     CachePool pool = cachePools.get(poolName);
     if (pool != null) {
       throw new IOException("cache pool " + poolName + " already exists.");
@@ -352,10 +323,8 @@ public final class CacheManager {
    */
   public synchronized void modifyCachePool(CachePoolInfo info)
       throws IOException {
+    CachePoolInfo.validate(info);
     String poolName = info.getPoolName();
-    if (poolName.isEmpty()) {
-      throw new IOException("invalid empty cache pool name");
-    }
     CachePool pool = cachePools.get(poolName);
     if (pool == null) {
       throw new IOException("cache pool " + poolName + " does not exist.");
@@ -401,9 +370,10 @@ public final class CacheManager {
    */
   public synchronized void removeCachePool(String poolName)
       throws IOException {
+    CachePoolInfo.validateName(poolName);
     CachePool pool = cachePools.remove(poolName);
     if (pool == null) {
-      throw new IOException("can't remove nonexistent cache pool " + poolName);
+      throw new IOException("can't remove non-existent cache pool " + poolName);
     }
     
     // Remove entries using this pool

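For reference, a minimal caller-side sketch of the single-value CacheManager
contract after this change (illustrative only, not part of the patch; the
cacheManager, directive, and pc variables are assumed to be in scope):

    // Failures that were previously wrapped in Fallible results now
    // surface directly as typed IOException subclasses such as
    // InvalidPoolNameError, PoolWritePermissionDeniedError,
    // InvalidIdException, or NoSuchIdException.
    try {
      PathBasedCacheDescriptor descriptor =
          cacheManager.addDirective(directive, pc);
      cacheManager.removeDescriptor(descriptor.getEntryId(), pc);
    } catch (IOException e) {
      // A failure no longer has to be fished out of a result list; it
      // propagates as an ordinary exception.
    }
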
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java Sat Sep 21 00:20:36 2013
@@ -24,9 +24,9 @@ import javax.annotation.Nonnull;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -162,13 +162,4 @@ public final class CachePool {
         append(", weight:").append(weight).
         append(" }").toString();
   }
-
-  public static void validateName(String name) throws IOException {
-    if (name.isEmpty()) {
-      // Empty pool names are not allowed because they would be highly
-      // confusing.  They would also break the ability to list all pools
-      // by starting with prevKey = ""
-      throw new IOException("invalid empty cache pool name");
-    }
-  }
 }

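The name check deleted from CachePool above is consolidated on CachePoolInfo;
the patched CacheManager now calls CachePoolInfo.validate(info) and
CachePoolInfo.validateName(poolName). A sketch of the expected shape of those
static helpers, reconstructed from the removed code (the actual bodies are in
the CachePoolInfo part of this patch):

    public static void validate(CachePoolInfo info) throws IOException {
      if (info == null) {
        throw new IOException("CachePoolInfo is null");
      }
      validateName(info.getPoolName());
    }

    public static void validateName(String poolName) throws IOException {
      if (poolName == null || poolName.isEmpty()) {
        // Empty pool names would be confusing and would break listing all
        // pools by starting with prevKey = ""
        throw new IOException("invalid empty cache pool name");
      }
    }
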
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Sat Sep 21 00:20:36 2013
@@ -132,7 +132,6 @@ import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -232,7 +231,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.delegation.DelegationKey;
 import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.DataChecksum;
-import org.apache.hadoop.util.Fallible;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.VersionInfo;
@@ -6884,18 +6882,17 @@ public class FSNamesystem implements Nam
     }
   }
 
-  @SuppressWarnings("unchecked")
-  List<Fallible<PathBasedCacheDescriptor>> addPathBasedCacheDirectives(
-      List<PathBasedCacheDirective> directives) throws IOException {
+  PathBasedCacheDescriptor addPathBasedCacheDirective(
+      PathBasedCacheDirective directive) throws IOException {
     CacheEntryWithPayload retryCacheEntry =
         RetryCache.waitForCompletion(retryCache, null);
     if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
-      return (List<Fallible<PathBasedCacheDescriptor>>) retryCacheEntry.getPayload();
+      return (PathBasedCacheDescriptor) retryCacheEntry.getPayload();
     }
     final FSPermissionChecker pc = isPermissionEnabled ?
         getPermissionChecker() : null;
     boolean success = false;
-    List<Fallible<PathBasedCacheDescriptor>> results = null;
+    PathBasedCacheDescriptor result = null;
     checkOperation(OperationCategory.WRITE);
     writeLock();
     try {
@@ -6904,8 +6901,8 @@ public class FSNamesystem implements Nam
         throw new SafeModeException(
             "Cannot add PathBasedCache directive", safeMode);
       }
-      results = cacheManager.addDirectives(directives, pc);
-      //getEditLog().logAddPathBasedCacheDirectives(results); FIXME: HDFS-5119
+      result = cacheManager.addDirective(directive, pc);
+      //getEditLog().logAddPathBasedCacheDirective(result); FIXME: HDFS-5119
       success = true;
     } finally {
       writeUnlock();
@@ -6913,24 +6910,21 @@ public class FSNamesystem implements Nam
         getEditLog().logSync();
       }
       if (isAuditEnabled() && isExternalInvocation()) {
-        logAuditEvent(success, "addPathBasedCacheDirectives", null, null, null);
+        logAuditEvent(success, "addPathBasedCacheDirective", null, null, null);
       }
-      RetryCache.setState(retryCacheEntry, success, results);
+      RetryCache.setState(retryCacheEntry, success, result);
     }
-    return results;
+    return result;
   }
 
-  @SuppressWarnings("unchecked")
-  List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids) throws IOException {
-    CacheEntryWithPayload retryCacheEntry =
-        RetryCache.waitForCompletion(retryCache, null);
+  void removePathBasedCacheDescriptor(Long id) throws IOException {
+    CacheEntry retryCacheEntry = RetryCache.waitForCompletion(retryCache);
     if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
-      return (List<Fallible<Long>>) retryCacheEntry.getPayload();
+      return;
     }
     final FSPermissionChecker pc = isPermissionEnabled ?
         getPermissionChecker() : null;
     boolean success = false;
-    List<Fallible<Long>> results = null;
     checkOperation(OperationCategory.WRITE);
     writeLock();
     try {
@@ -6939,22 +6933,22 @@ public class FSNamesystem implements Nam
         throw new SafeModeException(
             "Cannot remove PathBasedCache directives", safeMode);
       }
-      results = cacheManager.removeDescriptors(ids, pc);
+      cacheManager.removeDescriptor(id, pc);
       //getEditLog().logRemovePathBasedCacheEntries(results); FIXME: HDFS-5119
       success = true;
     } finally {
       writeUnlock();
       if (isAuditEnabled() && isExternalInvocation()) {
-        logAuditEvent(success, "removePathBasedCacheDescriptors", null, null, null);
+        logAuditEvent(success, "removePathBasedCacheDescriptor", null, null,
+            null);
       }
-      RetryCache.setState(retryCacheEntry, success, results);
+      RetryCache.setState(retryCacheEntry, success);
     }
     getEditLog().logSync();
-    return results;
   }
 
-  BatchedListEntries<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(long startId,
-      String pool, String path) throws IOException {
+  BatchedListEntries<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(
+      long startId, String pool, String path) throws IOException {
     final FSPermissionChecker pc = isPermissionEnabled ?
         getPermissionChecker() : null;
     BatchedListEntries<PathBasedCacheDescriptor> results;
@@ -6963,12 +6957,14 @@ public class FSNamesystem implements Nam
     boolean success = false;
     try {
       checkOperation(OperationCategory.READ);
-      results = cacheManager.listPathBasedCacheDescriptors(startId, pool, path, pc);
+      results =
+          cacheManager.listPathBasedCacheDescriptors(startId, pool, path, pc);
       success = true;
     } finally {
       readUnlock();
       if (isAuditEnabled() && isExternalInvocation()) {
-        logAuditEvent(success, "listPathBasedCacheDescriptors", null, null, null);
+        logAuditEvent(success, "listPathBasedCacheDescriptors", null, null,
+            null);
       }
     }
     return results;

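The retry-cache handling above follows the usual idiom for making these RPCs
idempotent, now specialized to single values: the add path stores the
returned descriptor as a payload so a retried call can replay the same
result, while the remove path only needs to record success. Condensed from
the code above:

    // add: payload-carrying entry, so a retry returns the same descriptor
    CacheEntryWithPayload entry = RetryCache.waitForCompletion(retryCache, null);
    if (entry != null && entry.isSuccess()) {
      return (PathBasedCacheDescriptor) entry.getPayload();
    }
    // ... perform the operation, then:
    RetryCache.setState(entry, success, result);

    // remove: plain entry, success/failure is all a retry needs
    CacheEntry entry2 = RetryCache.waitForCompletion(retryCache);
    if (entry2 != null && entry2.isSuccess()) {
      return;
    }
    // ... perform the operation, then:
    RetryCache.setState(entry2, success);
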
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java Sat Sep 21 00:20:36 2013
@@ -139,7 +139,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
 import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolPB;
 import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolServerSideTranslatorPB;
-import org.apache.hadoop.util.Fallible;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.util.VersionUtil;
 
@@ -1238,15 +1237,14 @@ class NameNodeRpcServer implements Namen
   }
 
   @Override
-  public List<Fallible<PathBasedCacheDescriptor>> addPathBasedCacheDirectives(
-      List<PathBasedCacheDirective> paths) throws IOException {
-    return namesystem.addPathBasedCacheDirectives(paths);
+  public PathBasedCacheDescriptor addPathBasedCacheDirective(
+      PathBasedCacheDirective path) throws IOException {
+    return namesystem.addPathBasedCacheDirective(path);
   }
 
   @Override
-  public List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids)
-      throws IOException {
-    return namesystem.removePathBasedCacheDescriptors(ids);
+  public void removePathBasedCacheDescriptor(Long id) throws IOException {
+    namesystem.removePathBasedCacheDescriptor(id);
   }
 
   private class ServerSidePathBasedCacheEntriesIterator

Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java Sat Sep 21 00:20:36 2013
@@ -22,15 +22,13 @@ import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.tools.TableListing.Justification;
-import org.apache.hadoop.util.Fallible;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -96,21 +94,14 @@ public class CacheAdmin {
       }
         
       DistributedFileSystem dfs = getDFS();
-      List<PathBasedCacheDirective> directives =
-          new LinkedList<PathBasedCacheDirective>();
-      PathBasedCacheDirective directive = new PathBasedCacheDirective(path, poolName);
-      directives.add(directive);
-      List<Fallible<PathBasedCacheDescriptor>> results =
-          dfs.addPathBasedCacheDirective(directives);
-      try {
-        PathBasedCacheDescriptor entry = results.get(0).get();
-        System.out.println("Added PathBasedCache entry " + entry.getEntryId());
-        return 0;
-      } catch (IOException e) {
-        System.err.println("Error adding cache directive " + directive + ": " +
-          e.getMessage());
-        return 1;
-      }
+      PathBasedCacheDirective directive =
+          new PathBasedCacheDirective(path, poolName);
+
+      PathBasedCacheDescriptor descriptor =
+          dfs.addPathBasedCacheDirective(directive);
+      System.out.println("Added PathBasedCache entry "
+          + descriptor.getEntryId());
+      return 0;
     }
   }
 
@@ -153,18 +144,10 @@ public class CacheAdmin {
         return 1;
       }
       DistributedFileSystem dfs = getDFS();
-      List<Long> ids = new LinkedList<Long>();
-      ids.add(id);
-      List<Fallible<Long>> results = dfs.removePathBasedCacheDescriptors(ids);
-      try {
-        Long resultId = results.get(0).get();
-        System.out.println("Removed PathBasedCache entry " + resultId);
-        return 0;
-      } catch (IOException e) {
-        System.err.println("Error removing cache directive " + id + ": " +
-          e.getMessage());
-        return 1;
-      }
+      dfs.removePathBasedCacheDescriptor(new PathBasedCacheDescriptor(id, null,
+          null));
+      System.out.println("Removed PathBasedCache directive " + id);
+      return 0;
     }
   }
 

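End to end, each CacheAdmin command now reduces to a single call on
DistributedFileSystem. A minimal client-side sketch (the path and pool
values are placeholders):

    DistributedFileSystem dfs = getDFS();
    PathBasedCacheDirective directive =
        new PathBasedCacheDirective("/cached/file", "pool1");
    PathBasedCacheDescriptor descriptor =
        dfs.addPathBasedCacheDirective(directive);
    // Removal takes a descriptor; as in the remove command above, one can
    // be reconstructed from a bare id with null path and pool.
    dfs.removePathBasedCacheDescriptor(descriptor);
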
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto Sat Sep 21 00:20:36 2013
@@ -368,36 +368,19 @@ message PathBasedCacheDirectiveProto {
   required string pool = 2;
 }
 
-message AddPathBasedCacheDirectivesRequestProto {
-  repeated PathBasedCacheDirectiveProto elements = 1;
+message AddPathBasedCacheDirectiveRequestProto {
+  required PathBasedCacheDirectiveProto directive = 1;
 }
 
-message AddPathBasedCacheDirectivesResponseProto {
-  repeated int64 results = 1 [packed=true];
+message AddPathBasedCacheDirectiveResponseProto {
+  required int64 descriptorId = 1;
 }
 
-enum AddPathBasedCacheDirectiveErrorProto {
-  UNEXPECTED_ADD_ERROR = -1;
-  EMPTY_PATH_ERROR = -2;
-  INVALID_PATH_NAME_ERROR = -3;
-  INVALID_POOL_NAME_ERROR = -4;
-  ADD_PERMISSION_DENIED_ERROR = -5;
-  PATH_ALREADY_EXISTS_IN_POOL_ERROR = -6;
+message RemovePathBasedCacheDescriptorRequestProto {
+  required int64 descriptorId = 1;
 }
 
-message RemovePathBasedCacheDescriptorsRequestProto {
-  repeated int64 elements = 1 [packed=true];
-}
-
-message RemovePathBasedCacheDescriptorsResponseProto {
-  repeated int64 results = 1 [packed=true];
-}
-
-enum RemovePathBasedCacheDescriptorErrorProto {
-  UNEXPECTED_REMOVE_ERROR = -1;
-  INVALID_CACHED_PATH_ID_ERROR = -2;
-  NO_SUCH_CACHED_PATH_ID_ERROR = -3;
-  REMOVE_PERMISSION_DENIED_ERROR = -4;
+message RemovePathBasedCacheDescriptorResponseProto {
 }
 
 message ListPathBasedCacheDescriptorsRequestProto {
@@ -644,10 +627,10 @@ service ClientNamenodeProtocol {
       returns(ListCorruptFileBlocksResponseProto);
   rpc metaSave(MetaSaveRequestProto) returns(MetaSaveResponseProto);
   rpc getFileInfo(GetFileInfoRequestProto) returns(GetFileInfoResponseProto);
-  rpc addPathBasedCacheDirectives(AddPathBasedCacheDirectivesRequestProto)
-      returns (AddPathBasedCacheDirectivesResponseProto);
-  rpc removePathBasedCacheDescriptors(RemovePathBasedCacheDescriptorsRequestProto)
-      returns (RemovePathBasedCacheDescriptorsResponseProto);
+  rpc addPathBasedCacheDirective(AddPathBasedCacheDirectiveRequestProto)
+      returns (AddPathBasedCacheDirectiveResponseProto);
+  rpc removePathBasedCacheDescriptor(RemovePathBasedCacheDescriptorRequestProto)
+      returns (RemovePathBasedCacheDescriptorResponseProto);
   rpc listPathBasedCacheDescriptors(ListPathBasedCacheDescriptorsRequestProto)
       returns (ListPathBasedCacheDescriptorsResponseProto);
   rpc addCachePool(AddCachePoolRequestProto)

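On the wire, the repeated batch fields become required singular fields. A
sketch of how the generated Java builders for the new messages are used,
assuming standard protobuf codegen and that the directive proto's first
field is the path (mirroring PathBasedCacheDirective):

    AddPathBasedCacheDirectiveRequestProto req =
        AddPathBasedCacheDirectiveRequestProto.newBuilder()
            .setDirective(PathBasedCacheDirectiveProto.newBuilder()
                .setPath(path)
                .setPool(pool))
            .build();
    // and, given an AddPathBasedCacheDirectiveResponseProto response:
    long descriptorId = response.getDescriptorId();
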
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java?rev=1525183&r1=1525182&r2=1525183&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java Sat Sep 21 00:20:36 2013
@@ -36,12 +36,11 @@ import org.apache.hadoop.fs.RemoteIterat
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.nativeio.NativeIO;
-import org.apache.hadoop.util.Fallible;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -148,17 +147,13 @@ public class TestCacheReplicationManager
     waitForExpectedNumCachedBlocks(expected);
     // Cache and check each path in sequence
     for (int i=0; i<numFiles; i++) {
-      List<PathBasedCacheDirective> toAdd =
-          new ArrayList<PathBasedCacheDirective>();
-      toAdd.add(new PathBasedCacheDirective(paths.get(i), pool));
-      List<Fallible<PathBasedCacheDescriptor>> fallibles =
-          nnRpc.addPathBasedCacheDirectives(toAdd);
-      assertEquals("Unexpected number of fallibles",
-          1, fallibles.size());
-      PathBasedCacheDescriptor directive = fallibles.get(0).get();
-      assertEquals("Directive does not match requested path", paths.get(i),
-          directive.getPath());
-      assertEquals("Directive does not match requested pool", pool,
-          directive.getPool());
+      PathBasedCacheDirective directive =
+          new PathBasedCacheDirective(paths.get(i), pool);
+      PathBasedCacheDescriptor descriptor =
+          nnRpc.addPathBasedCacheDirective(directive);
+      assertEquals("Descriptor does not match requested path", paths.get(i),
+          descriptor.getPath());
+      assertEquals("Descriptor does not match requested pool", pool,
+          descriptor.getPool());
       expected += numBlocksPerFile;
       waitForExpectedNumCachedBlocks(expected);
@@ -167,14 +162,8 @@ public class TestCacheReplicationManager
     RemoteIterator<PathBasedCacheDescriptor> entries =
         nnRpc.listPathBasedCacheDescriptors(0, null, null);
     for (int i=0; i<numFiles; i++) {
-      PathBasedCacheDescriptor entry = entries.next();
-      List<Long> toRemove = new ArrayList<Long>();
-      toRemove.add(entry.getEntryId());
-      List<Fallible<Long>> fallibles = nnRpc.removePathBasedCacheDescriptors(toRemove);
-      assertEquals("Unexpected number of fallibles", 1, fallibles.size());
-      Long l = fallibles.get(0).get();
-      assertEquals("Removed entryId does not match requested",
-          entry.getEntryId(), l.longValue());
+      PathBasedCacheDescriptor descriptor = entries.next();
+      nnRpc.removePathBasedCacheDescriptor(descriptor.getEntryId());
       expected -= numBlocksPerFile;
       waitForExpectedNumCachedBlocks(expected);
     }