You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by cm...@apache.org on 2013/09/12 05:55:11 UTC
svn commit: r1522272 [1/2] - in
/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs: ./
src/main/bin/ src/main/java/org/apache/hadoop/hdfs/
src/main/java/org/apache/hadoop/hdfs/protocol/
src/main/java/org/apache/hadoop/hdfs/protocolPB/ sr...
Author: cmccabe
Date: Thu Sep 12 03:55:10 2013
New Revision: 1522272
URL: http://svn.apache.org/r1522272
Log:
HDFS-5158. Add command-line support for manipulating cache directives
Added:
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java
- copied, changed from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java
- copied, changed from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java
- copied, changed from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheEntry.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheEntryException.java
- copied, changed from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathCacheEntryException.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java
- copied, changed from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathCacheRequests.java
Removed:
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheEntry.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathCacheEntryException.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathCacheRequests.java
Modified:
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt Thu Sep 12 03:55:10 2013
@@ -30,6 +30,9 @@ HDFS-4949 (Unreleased)
HDFS-5120. Add command-line support for manipulating cache pools.
(Contributed by Colin Patrick McCabe)
+ HDFS-5158. Add command-line support for manipulating cache directives.
+ (Contributed by Colin Patrick McCabe)
+
OPTIMIZATIONS
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs Thu Sep 12 03:55:10 2013
@@ -59,6 +59,7 @@ function print_usage(){
echo " Use -help to see options"
echo " portmap run a portmap service"
echo " nfs3 run an NFS version 3 gateway"
+ echo " cacheadmin configure the HDFS cache"
echo ""
echo "Most commands print help when invoked w/o parameters."
}
@@ -155,6 +156,8 @@ elif [ "$COMMAND" = "portmap" ] ; then
CLASS=org.apache.hadoop.portmap.Portmap
elif [ "$COMMAND" = "nfs3" ] ; then
CLASS=org.apache.hadoop.hdfs.nfs.nfs3.Nfs3
+elif [ "$COMMAND" = "cacheadmin" ] ; then
+ CLASS=org.apache.hadoop.hdfs.tools.CacheAdmin
else
CLASS="$COMMAND"
fi
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java Thu Sep 12 03:55:10 2013
@@ -67,6 +67,8 @@ import org.apache.hadoop.hdfs.protocol.H
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
@@ -77,6 +79,7 @@ import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Fallible;
import org.apache.hadoop.util.Progressable;
import com.google.common.annotations.VisibleForTesting;
@@ -1581,6 +1584,45 @@ public class DistributedFileSystem exten
}
/**
+ * Add some PathBasedCache directives.
+ *
+ * @param directives A list of PathBasedCache directives to be added.
+ * @return A Fallible list, where each element is either a successfully added
+ * PathBasedCache entry, or an IOException describing why the directive
+ * could not be added.
+ */
+ public List<Fallible<PathBasedCacheEntry>>
+ addPathBasedCacheDirective(List<PathBasedCacheDirective> directives)
+ throws IOException {
+ return dfs.namenode.addPathBasedCacheDirectives(directives);
+ }
+
+ /**
+ * Remove some PathBasedCache entries.
+ *
+ * @param ids A list of all the entry IDs to be removed.
+ * @return A Fallible list where each element is either a successfully removed
+ * ID, or an IOException describing why the ID could not be removed.
+ */
+ public List<Fallible<Long>>
+ removePathBasedCacheEntries(List<Long> ids) throws IOException {
+ return dfs.namenode.removePathBasedCacheEntries(ids);
+ }
+
+ /**
+ * List the set of cached paths of a cache pool. Incrementally fetches results
+ * from the server.
+ *
+ * @param pool The cache pool to list, or null to list all pools.
+ * @param path The path name to list, or null to list all paths.
+ * @return A RemoteIterator which returns PathBasedCacheEntry objects.
+ */
+ public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(
+ String pool, String path) throws IOException {
+ return dfs.namenode.listPathBasedCacheEntries(0, pool, path);
+ }
+
+ /**
* Add a cache pool.
*
* @param req
Copied: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java (from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java?p2=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java&p1=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java&r1=1522246&r2=1522272&rev=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathCacheDirectiveException.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java Thu Sep 12 03:55:10 2013
@@ -20,69 +20,69 @@ package org.apache.hadoop.hdfs.protocol;
import java.io.IOException;
/**
- * An exception which occurred when trying to add a path cache directive.
+ * An exception which occurred when trying to add a PathBasedCache directive.
*/
-public abstract class AddPathCacheDirectiveException extends IOException {
+public abstract class AddPathBasedCacheDirectiveException extends IOException {
private static final long serialVersionUID = 1L;
- private final PathCacheDirective directive;
+ private final PathBasedCacheDirective directive;
- public AddPathCacheDirectiveException(String description,
- PathCacheDirective directive) {
+ public AddPathBasedCacheDirectiveException(String description,
+ PathBasedCacheDirective directive) {
super(description);
this.directive = directive;
}
- public PathCacheDirective getDirective() {
+ public PathBasedCacheDirective getDirective() {
return directive;
}
public static final class EmptyPathError
- extends AddPathCacheDirectiveException {
+ extends AddPathBasedCacheDirectiveException {
private static final long serialVersionUID = 1L;
- public EmptyPathError(PathCacheDirective directive) {
+ public EmptyPathError(PathBasedCacheDirective directive) {
super("empty path in directive " + directive, directive);
}
}
public static class InvalidPathNameError
- extends AddPathCacheDirectiveException {
+ extends AddPathBasedCacheDirectiveException {
private static final long serialVersionUID = 1L;
- public InvalidPathNameError(PathCacheDirective directive) {
+ public InvalidPathNameError(PathBasedCacheDirective directive) {
super("can't handle non-absolute path name " + directive.getPath(),
directive);
}
}
public static class InvalidPoolNameError
- extends AddPathCacheDirectiveException {
+ extends AddPathBasedCacheDirectiveException {
private static final long serialVersionUID = 1L;
- public InvalidPoolNameError(PathCacheDirective directive) {
+ public InvalidPoolNameError(PathBasedCacheDirective directive) {
super("invalid pool name '" + directive.getPool() + "'", directive);
}
}
public static class PoolWritePermissionDeniedError
- extends AddPathCacheDirectiveException {
+ extends AddPathBasedCacheDirectiveException {
private static final long serialVersionUID = 1L;
- public PoolWritePermissionDeniedError(PathCacheDirective directive) {
+ public PoolWritePermissionDeniedError(PathBasedCacheDirective directive) {
super("write permission denied for pool '" + directive.getPool() + "'",
directive);
}
}
- public static class UnexpectedAddPathCacheDirectiveException
- extends AddPathCacheDirectiveException {
+ public static class UnexpectedAddPathBasedCacheDirectiveException
+ extends AddPathBasedCacheDirectiveException {
private static final long serialVersionUID = 1L;
- public UnexpectedAddPathCacheDirectiveException(
- PathCacheDirective directive) {
+ public UnexpectedAddPathBasedCacheDirectiveException(
+ PathBasedCacheDirective directive) {
super("encountered an unexpected error when trying to " +
- "add path cache directive " + directive, directive);
+ "add PathBasedCache directive " + directive, directive);
}
}
};
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java Thu Sep 12 03:55:10 2013
@@ -1098,27 +1098,27 @@ public interface ClientProtocol {
String fromSnapshot, String toSnapshot) throws IOException;
/**
- * Add some path cache directives to the CacheManager.
+ * Add some PathBasedCache directives to the CacheManager.
*
- * @param directives A list of path cache directives to be added.
+ * @param directives A list of PathBasedCache directives to be added.
* @return A Fallible list, where each element is either a successfully added
- * path cache entry, or an IOException describing why the directive
+ * PathBasedCache entry, or an IOException describing why the directive
* could not be added.
*/
@AtMostOnce
- public List<Fallible<PathCacheEntry>>
- addPathCacheDirectives(List<PathCacheDirective> directives)
+ public List<Fallible<PathBasedCacheEntry>>
+ addPathBasedCacheDirectives(List<PathBasedCacheDirective> directives)
throws IOException;
/**
- * Remove some path cache entries from the CacheManager.
+ * Remove some PathBasedCache entries from the CacheManager.
*
* @param ids A list of all the entry IDs to be removed from the CacheManager.
* @return A Fallible list where each element is either a successfully removed
* ID, or an IOException describing why the ID could not be removed.
*/
@AtMostOnce
- public List<Fallible<Long>> removePathCacheEntries(List<Long> ids)
+ public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
throws IOException;
/**
@@ -1126,13 +1126,14 @@ public interface ClientProtocol {
* from the server.
*
* @param prevId The last listed entry ID, or -1 if this is the first call to
- * listPathCacheEntries.
- * @param pool The cache pool to list, or the empty string to list all pools
- * @return A RemoteIterator which returns PathCacheEntry objects.
+ * listPathBasedCacheEntries.
+ * @param pool The cache pool to list, or null to list all pools.
+ * @param path The path name to list, or null to list all paths.
+ * @return A RemoteIterator which returns PathBasedCacheEntry objects.
*/
@Idempotent
- public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
- String pool) throws IOException;
+ public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
+ String pool, String path) throws IOException;
/**
* Add a new cache pool.
Copied: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java (from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java?p2=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java&p1=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java&r1=1522246&r2=1522272&rev=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheDirective.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java Thu Sep 12 03:55:10 2013
@@ -24,19 +24,19 @@ import com.google.common.collect.Compari
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
/**
* A directive to add a path to a cache pool.
*/
-public class PathCacheDirective implements Comparable<PathCacheDirective> {
+public class PathBasedCacheDirective implements Comparable<PathBasedCacheDirective> {
private final String path;
private final String pool;
- public PathCacheDirective(String path, String pool) {
+ public PathBasedCacheDirective(String path, String pool) {
Preconditions.checkNotNull(path);
Preconditions.checkNotNull(pool);
this.path = path;
@@ -58,10 +58,10 @@ public class PathCacheDirective implemen
}
/**
- * Check if this PathCacheDirective is valid.
+ * Check if this PathBasedCacheDirective is valid.
*
* @throws IOException
- * If this PathCacheDirective is not valid.
+ * If this PathBasedCacheDirective is not valid.
*/
public void validate() throws IOException {
if (path.isEmpty()) {
@@ -76,7 +76,7 @@ public class PathCacheDirective implemen
}
@Override
- public int compareTo(PathCacheDirective rhs) {
+ public int compareTo(PathBasedCacheDirective rhs) {
return ComparisonChain.start().
compare(pool, rhs.getPool()).
compare(path, rhs.getPath()).
@@ -91,7 +91,7 @@ public class PathCacheDirective implemen
@Override
public boolean equals(Object o) {
try {
- PathCacheDirective other = (PathCacheDirective)o;
+ PathBasedCacheDirective other = (PathBasedCacheDirective)o;
return other.compareTo(this) == 0;
} catch (ClassCastException e) {
return false;
Copied: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java (from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheEntry.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java?p2=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java&p1=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheEntry.java&r1=1522246&r2=1522272&rev=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathCacheEntry.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java Thu Sep 12 03:55:10 2013
@@ -23,13 +23,13 @@ import org.apache.commons.lang.builder.H
import com.google.common.base.Preconditions;
/**
- * An entry in the NameNode's path cache.
+ * An entry in the NameNode's PathBasedCache.
*/
-public final class PathCacheEntry {
+public final class PathBasedCacheEntry {
private final long entryId;
- private final PathCacheDirective directive;
+ private final PathBasedCacheDirective directive;
- public PathCacheEntry(long entryId, PathCacheDirective directive) {
+ public PathBasedCacheEntry(long entryId, PathBasedCacheDirective directive) {
Preconditions.checkArgument(entryId > 0);
this.entryId = entryId;
this.directive = directive;
@@ -39,14 +39,14 @@ public final class PathCacheEntry {
return entryId;
}
- public PathCacheDirective getDirective() {
+ public PathBasedCacheDirective getDirective() {
return directive;
}
@Override
public boolean equals(Object o) {
try {
- PathCacheEntry other = (PathCacheEntry)o;
+ PathBasedCacheEntry other = (PathBasedCacheEntry)o;
return new EqualsBuilder().
append(this.entryId, other.entryId).
append(this.directive, other.directive).
Copied: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheEntryException.java (from r1522246, hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathCacheEntryException.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheEntryException.java?p2=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheEntryException.java&p1=hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathCacheEntryException.java&r1=1522246&r2=1522272&rev=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathCacheEntryException.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/RemovePathBasedCacheEntryException.java Thu Sep 12 03:55:10 2013
@@ -22,14 +22,14 @@ import java.io.IOException;
import com.google.common.base.Preconditions;
/**
- * An exception which occurred when trying to remove a path cache entry.
+ * An exception which occurred when trying to remove a PathBasedCache entry.
*/
-public abstract class RemovePathCacheEntryException extends IOException {
+public abstract class RemovePathBasedCacheEntryException extends IOException {
private static final long serialVersionUID = 1L;
private final long entryId;
- public RemovePathCacheEntryException(String description, long entryId) {
+ public RemovePathBasedCacheEntryException(String description, long entryId) {
super(description);
this.entryId = entryId;
}
@@ -39,7 +39,7 @@ public abstract class RemovePathCacheEnt
}
public final static class InvalidIdException
- extends RemovePathCacheEntryException {
+ extends RemovePathBasedCacheEntryException {
private static final long serialVersionUID = 1L;
public InvalidIdException(long entryId) {
@@ -48,31 +48,31 @@ public abstract class RemovePathCacheEnt
}
public final static class RemovePermissionDeniedException
- extends RemovePathCacheEntryException {
+ extends RemovePathBasedCacheEntryException {
private static final long serialVersionUID = 1L;
public RemovePermissionDeniedException(long entryId) {
- super("permission denied when trying to remove path cache entry id " +
+ super("permission denied when trying to remove PathBasedCache entry id " +
entryId, entryId);
}
}
public final static class NoSuchIdException
- extends RemovePathCacheEntryException {
+ extends RemovePathBasedCacheEntryException {
private static final long serialVersionUID = 1L;
public NoSuchIdException(long entryId) {
- super("there is no path cache entry with id " + entryId, entryId);
+ super("there is no PathBasedCache entry with id " + entryId, entryId);
}
}
- public final static class UnexpectedRemovePathCacheEntryException
- extends RemovePathCacheEntryException {
+ public final static class UnexpectedRemovePathBasedCacheEntryException
+ extends RemovePathBasedCacheEntryException {
private static final long serialVersionUID = 1L;
- public UnexpectedRemovePathCacheEntryException(long id) {
+ public UnexpectedRemovePathBasedCacheEntryException(long id) {
super("encountered an unexpected error when trying to " +
- "remove path cache entry id " + id, id);
+ "remove PathBasedCache entry id " + id, id);
}
}
}
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java Thu Sep 12 03:55:10 2013
@@ -28,9 +28,10 @@ import org.apache.hadoop.fs.FsServerDefa
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
@@ -38,11 +39,11 @@ import org.apache.hadoop.hdfs.protocol.D
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
@@ -51,9 +52,9 @@ import org.apache.hadoop.hdfs.protocol.p
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectiveErrorProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
@@ -114,25 +115,25 @@ import org.apache.hadoop.hdfs.protocol.p
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesElementProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesElementProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheDirectiveProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathBasedCacheDirectiveProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntryErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntryErrorProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2ResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
@@ -174,7 +175,6 @@ import org.apache.hadoop.hdfs.security.t
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.namenode.CachePool;
import org.apache.hadoop.hdfs.server.namenode.INodeId;
-import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
@@ -1039,34 +1039,39 @@ public class ClientNamenodeProtocolServe
}
@Override
- public AddPathCacheDirectivesResponseProto addPathCacheDirectives(RpcController controller,
- AddPathCacheDirectivesRequestProto request) throws ServiceException {
+ public AddPathBasedCacheDirectivesResponseProto addPathBasedCacheDirectives(RpcController controller,
+ AddPathBasedCacheDirectivesRequestProto request) throws ServiceException {
try {
- ArrayList<PathCacheDirective> input =
- new ArrayList<PathCacheDirective>(request.getElementsCount());
+ ArrayList<PathBasedCacheDirective> input =
+ new ArrayList<PathBasedCacheDirective>(request.getElementsCount());
for (int i = 0; i < request.getElementsCount(); i++) {
- PathCacheDirectiveProto proto = request.getElements(i);
- input.add(new PathCacheDirective(proto.getPath(), proto.getPool()));
+ PathBasedCacheDirectiveProto proto = request.getElements(i);
+ input.add(new PathBasedCacheDirective(proto.getPath(), proto.getPool()));
}
- List<Fallible<PathCacheEntry>> output = server.addPathCacheDirectives(input);
- AddPathCacheDirectivesResponseProto.Builder builder =
- AddPathCacheDirectivesResponseProto.newBuilder();
+ List<Fallible<PathBasedCacheEntry>> output = server.addPathBasedCacheDirectives(input);
+ AddPathBasedCacheDirectivesResponseProto.Builder builder =
+ AddPathBasedCacheDirectivesResponseProto.newBuilder();
for (int idx = 0; idx < output.size(); idx++) {
try {
- PathCacheEntry entry = output.get(idx).get();
+ PathBasedCacheEntry entry = output.get(idx).get();
builder.addResults(entry.getEntryId());
- } catch (EmptyPathError ioe) {
- builder.addResults(AddPathCacheDirectiveErrorProto.
- EMPTY_PATH_ERROR_VALUE);
- } catch (InvalidPathNameError ioe) {
- builder.addResults(AddPathCacheDirectiveErrorProto.
- INVALID_PATH_NAME_ERROR_VALUE);
- } catch (InvalidPoolNameError ioe) {
- builder.addResults(AddPathCacheDirectiveErrorProto.
- INVALID_POOL_NAME_ERROR_VALUE);
} catch (IOException ioe) {
- builder.addResults(AddPathCacheDirectiveErrorProto.
- UNEXPECTED_ADD_ERROR_VALUE);
+ if (ioe.getCause() instanceof EmptyPathError) {
+ builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+ EMPTY_PATH_ERROR_VALUE);
+ } else if (ioe.getCause() instanceof InvalidPathNameError) {
+ builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+ INVALID_PATH_NAME_ERROR_VALUE);
+ } else if (ioe.getCause() instanceof InvalidPoolNameError) {
+ builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+ INVALID_POOL_NAME_ERROR_VALUE);
+ } else if (ioe.getCause() instanceof PoolWritePermissionDeniedError) {
+ builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+ ADD_PERMISSION_DENIED_ERROR_VALUE);
+ } else {
+ builder.addResults(AddPathBasedCacheDirectiveErrorProto.
+ UNEXPECTED_ADD_ERROR_VALUE);
+ }
}
}
return builder.build();
@@ -1076,29 +1081,29 @@ public class ClientNamenodeProtocolServe
}
@Override
- public RemovePathCacheEntriesResponseProto removePathCacheEntries(
- RpcController controller, RemovePathCacheEntriesRequestProto request)
+ public RemovePathBasedCacheEntriesResponseProto removePathBasedCacheEntries(
+ RpcController controller, RemovePathBasedCacheEntriesRequestProto request)
throws ServiceException {
try {
List<Fallible<Long>> output =
- server.removePathCacheEntries(request.getElementsList());
- RemovePathCacheEntriesResponseProto.Builder builder =
- RemovePathCacheEntriesResponseProto.newBuilder();
+ server.removePathBasedCacheEntries(request.getElementsList());
+ RemovePathBasedCacheEntriesResponseProto.Builder builder =
+ RemovePathBasedCacheEntriesResponseProto.newBuilder();
for (int idx = 0; idx < output.size(); idx++) {
try {
long id = output.get(idx).get();
builder.addResults(id);
} catch (InvalidIdException ioe) {
- builder.addResults(RemovePathCacheEntryErrorProto.
+ builder.addResults(RemovePathBasedCacheEntryErrorProto.
INVALID_CACHED_PATH_ID_ERROR_VALUE);
} catch (NoSuchIdException ioe) {
- builder.addResults(RemovePathCacheEntryErrorProto.
+ builder.addResults(RemovePathBasedCacheEntryErrorProto.
NO_SUCH_CACHED_PATH_ID_ERROR_VALUE);
} catch (RemovePermissionDeniedException ioe) {
- builder.addResults(RemovePathCacheEntryErrorProto.
+ builder.addResults(RemovePathBasedCacheEntryErrorProto.
REMOVE_PERMISSION_DENIED_ERROR_VALUE);
} catch (IOException ioe) {
- builder.addResults(RemovePathCacheEntryErrorProto.
+ builder.addResults(RemovePathBasedCacheEntryErrorProto.
UNEXPECTED_REMOVE_ERROR_VALUE);
}
}
@@ -1109,20 +1114,32 @@ public class ClientNamenodeProtocolServe
}
@Override
- public ListPathCacheEntriesResponseProto listPathCacheEntries(RpcController controller,
- ListPathCacheEntriesRequestProto request) throws ServiceException {
- try {
- RemoteIterator<PathCacheEntry> iter =
- server.listPathCacheEntries(request.getPrevId(), request.getPool());
- ListPathCacheEntriesResponseProto.Builder builder =
- ListPathCacheEntriesResponseProto.newBuilder();
+ public ListPathBasedCacheEntriesResponseProto listPathBasedCacheEntries(
+ RpcController controller, ListPathBasedCacheEntriesRequestProto request)
+ throws ServiceException {
+ try {
+ RemoteIterator<PathBasedCacheEntry> iter =
+ server.listPathBasedCacheEntries(request.getPrevId(),
+ request.hasPool() ? request.getPool() : null,
+ request.hasPath() ? request.getPath() : null);
+ ListPathBasedCacheEntriesResponseProto.Builder builder =
+ ListPathBasedCacheEntriesResponseProto.newBuilder();
+ long prevId = 0;
while (iter.hasNext()) {
- PathCacheEntry entry = iter.next();
+ PathBasedCacheEntry entry = iter.next();
builder.addElements(
- ListPathCacheEntriesElementProto.newBuilder().
+ ListPathBasedCacheEntriesElementProto.newBuilder().
setId(entry.getEntryId()).
setPath(entry.getDirective().getPath()).
setPool(entry.getDirective().getPool()));
+ prevId = entry.getEntryId();
+ }
+ if (prevId == 0) {
+ builder.setHasMore(false);
+ } else {
+ iter = server.listPathBasedCacheEntries(prevId, request.getPool(),
+ request.getPath());
+ builder.setHasMore(iter.hasNext());
}
return builder.build();
} catch (IOException e) {
@@ -1199,6 +1216,7 @@ public class ClientNamenodeProtocolServe
server.listCachePools(request.getPrevPoolName());
ListCachePoolsResponseProto.Builder responseBuilder =
ListCachePoolsResponseProto.newBuilder();
+ String prevPoolName = null;
while (iter.hasNext()) {
CachePoolInfo pool = iter.next();
ListCachePoolsResponseElementProto.Builder elemBuilder =
@@ -1217,6 +1235,14 @@ public class ClientNamenodeProtocolServe
elemBuilder.setWeight(pool.getWeight());
}
responseBuilder.addElements(elemBuilder.build());
+ prevPoolName = pool.getPoolName();
+ }
+ // fill in hasNext
+ if (prevPoolName == null) {
+ responseBuilder.setHasMore(false);
+ } else {
+ iter = server.listCachePools(prevPoolName);
+ responseBuilder.setHasMore(iter.hasNext());
}
return responseBuilder.build();
} catch (IOException e) {
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java Thu Sep 12 03:55:10 2013
@@ -23,7 +23,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import java.util.NoSuchElementException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -39,16 +38,16 @@ import org.apache.hadoop.fs.RemoteIterat
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.EmptyPathError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPathNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.UnexpectedAddPathCacheDirectiveException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.UnexpectedRemovePathCacheEntryException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.UnexpectedAddPathBasedCacheDirectiveException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.UnexpectedRemovePathBasedCacheEntryException;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -68,10 +67,10 @@ import org.apache.hadoop.hdfs.protocol.S
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathCacheDirectiveProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectiveErrorProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathCacheDirectivesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathBasedCacheDirectiveProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectivesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AllowSnapshotRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendResponseProto;
@@ -109,10 +108,10 @@ import org.apache.hadoop.hdfs.protocol.p
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesElementProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesElementProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseElementProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
@@ -122,9 +121,9 @@ import org.apache.hadoop.hdfs.protocol.p
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntriesResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathCacheEntryErrorProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntryErrorProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
@@ -1018,47 +1017,47 @@ public class ClientNamenodeProtocolTrans
}
}
- private static IOException addPathCacheDirectivesError(long code,
- PathCacheDirective directive) {
- if (code == AddPathCacheDirectiveErrorProto.EMPTY_PATH_ERROR_VALUE) {
+ private static IOException addPathBasedCacheDirectivesError(long code,
+ PathBasedCacheDirective directive) {
+ if (code == AddPathBasedCacheDirectiveErrorProto.EMPTY_PATH_ERROR_VALUE) {
return new EmptyPathError(directive);
- } else if (code == AddPathCacheDirectiveErrorProto.
+ } else if (code == AddPathBasedCacheDirectiveErrorProto.
INVALID_PATH_NAME_ERROR_VALUE) {
return new InvalidPathNameError(directive);
- } else if (code == AddPathCacheDirectiveErrorProto.
+ } else if (code == AddPathBasedCacheDirectiveErrorProto.
INVALID_POOL_NAME_ERROR_VALUE) {
return new InvalidPoolNameError(directive);
} else {
- return new UnexpectedAddPathCacheDirectiveException(directive);
+ return new UnexpectedAddPathBasedCacheDirectiveException(directive);
}
}
@Override
- public List<Fallible<PathCacheEntry>> addPathCacheDirectives(
- List<PathCacheDirective> directives) throws IOException {
+ public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
+ List<PathBasedCacheDirective> directives) throws IOException {
try {
- AddPathCacheDirectivesRequestProto.Builder builder =
- AddPathCacheDirectivesRequestProto.newBuilder();
- for (PathCacheDirective directive : directives) {
- builder.addElements(PathCacheDirectiveProto.newBuilder().
+ AddPathBasedCacheDirectivesRequestProto.Builder builder =
+ AddPathBasedCacheDirectivesRequestProto.newBuilder();
+ for (PathBasedCacheDirective directive : directives) {
+ builder.addElements(PathBasedCacheDirectiveProto.newBuilder().
setPath(directive.getPath()).
setPool(directive.getPool()).
build());
}
- AddPathCacheDirectivesResponseProto result =
- rpcProxy.addPathCacheDirectives(null, builder.build());
+ AddPathBasedCacheDirectivesResponseProto result =
+ rpcProxy.addPathBasedCacheDirectives(null, builder.build());
int resultsCount = result.getResultsCount();
- ArrayList<Fallible<PathCacheEntry>> results =
- new ArrayList<Fallible<PathCacheEntry>>(resultsCount);
+ ArrayList<Fallible<PathBasedCacheEntry>> results =
+ new ArrayList<Fallible<PathBasedCacheEntry>>(resultsCount);
for (int i = 0; i < resultsCount; i++) {
- PathCacheDirective directive = directives.get(i);
+ PathBasedCacheDirective directive = directives.get(i);
long code = result.getResults(i);
if (code > 0) {
- results.add(new Fallible<PathCacheEntry>(
- new PathCacheEntry(code, directive)));
+ results.add(new Fallible<PathBasedCacheEntry>(
+ new PathBasedCacheEntry(code, directive)));
} else {
- results.add(new Fallible<PathCacheEntry>(
- addPathCacheDirectivesError(code, directive)));
+ results.add(new Fallible<PathBasedCacheEntry>(
+ addPathBasedCacheDirectivesError(code, directive)));
}
}
return results;
@@ -1067,32 +1066,32 @@ public class ClientNamenodeProtocolTrans
}
}
- private static IOException removePathCacheEntriesError(long code, long id) {
- if (code == RemovePathCacheEntryErrorProto.
+ private static IOException removePathBasedCacheEntriesError(long code, long id) {
+ if (code == RemovePathBasedCacheEntryErrorProto.
INVALID_CACHED_PATH_ID_ERROR_VALUE) {
return new InvalidIdException(id);
- } else if (code == RemovePathCacheEntryErrorProto.
+ } else if (code == RemovePathBasedCacheEntryErrorProto.
NO_SUCH_CACHED_PATH_ID_ERROR_VALUE) {
return new NoSuchIdException(id);
- } else if (code == RemovePathCacheEntryErrorProto.
+ } else if (code == RemovePathBasedCacheEntryErrorProto.
REMOVE_PERMISSION_DENIED_ERROR_VALUE) {
return new RemovePermissionDeniedException(id);
} else {
- return new UnexpectedRemovePathCacheEntryException(id);
+ return new UnexpectedRemovePathBasedCacheEntryException(id);
}
}
@Override
- public List<Fallible<Long>> removePathCacheEntries(List<Long> ids)
+ public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
throws IOException {
try {
- RemovePathCacheEntriesRequestProto.Builder builder =
- RemovePathCacheEntriesRequestProto.newBuilder();
+ RemovePathBasedCacheEntriesRequestProto.Builder builder =
+ RemovePathBasedCacheEntriesRequestProto.newBuilder();
for (Long id : ids) {
builder.addElements(id);
}
- RemovePathCacheEntriesResponseProto result =
- rpcProxy.removePathCacheEntries(null, builder.build());
+ RemovePathBasedCacheEntriesResponseProto result =
+ rpcProxy.removePathBasedCacheEntries(null, builder.build());
int resultsCount = result.getResultsCount();
ArrayList<Fallible<Long>> results =
new ArrayList<Fallible<Long>>(resultsCount);
@@ -1102,7 +1101,7 @@ public class ClientNamenodeProtocolTrans
results.add(new Fallible<Long>(code));
} else {
results.add(new Fallible<Long>(
- removePathCacheEntriesError(code, ids.get(i))));
+ removePathBasedCacheEntriesError(code, ids.get(i))));
}
}
return results;
@@ -1111,20 +1110,20 @@ public class ClientNamenodeProtocolTrans
}
}
- private static class BatchedPathCacheEntries
- implements BatchedEntries<PathCacheEntry> {
- private ListPathCacheEntriesResponseProto response;
+ private static class BatchedPathBasedCacheEntries
+ implements BatchedEntries<PathBasedCacheEntry> {
+ private ListPathBasedCacheEntriesResponseProto response;
- BatchedPathCacheEntries(ListPathCacheEntriesResponseProto response) {
+ BatchedPathBasedCacheEntries(ListPathBasedCacheEntriesResponseProto response) {
this.response = response;
}
@Override
- public PathCacheEntry get(int i) {
- ListPathCacheEntriesElementProto elementProto =
+ public PathBasedCacheEntry get(int i) {
+ ListPathBasedCacheEntriesElementProto elementProto =
response.getElements(i);
- return new PathCacheEntry(elementProto.getId(),
- new PathCacheDirective(elementProto.getPath(),
+ return new PathBasedCacheEntry(elementProto.getId(),
+ new PathBasedCacheDirective(elementProto.getPath(),
elementProto.getPool()));
}
@@ -1139,45 +1138,48 @@ public class ClientNamenodeProtocolTrans
}
}
- private class PathCacheEntriesIterator
- extends BatchedRemoteIterator<Long, PathCacheEntry> {
+ private class PathBasedCacheEntriesIterator
+ extends BatchedRemoteIterator<Long, PathBasedCacheEntry> {
private final String pool;
+ private final String path;
- public PathCacheEntriesIterator(long prevKey, String pool) {
+ public PathBasedCacheEntriesIterator(long prevKey, String pool, String path) {
super(prevKey);
this.pool = pool;
+ this.path = path;
}
@Override
- public BatchedEntries<PathCacheEntry> makeRequest(
+ public BatchedEntries<PathBasedCacheEntry> makeRequest(
Long nextKey) throws IOException {
- ListPathCacheEntriesResponseProto response;
+ ListPathBasedCacheEntriesResponseProto response;
try {
- ListPathCacheEntriesRequestProto req =
- ListPathCacheEntriesRequestProto.newBuilder().
- setPrevId(nextKey).
- setPool(pool).
- build();
- response = rpcProxy.listPathCacheEntries(null, req);
- if (response.getElementsCount() == 0) {
- response = null;
+ ListPathBasedCacheEntriesRequestProto.Builder builder =
+ ListPathBasedCacheEntriesRequestProto.newBuilder().setPrevId(nextKey);
+ if (pool != null) {
+ builder.setPool(pool);
}
+ if (path != null) {
+ builder.setPath(path);
+ }
+ ListPathBasedCacheEntriesRequestProto req = builder.build();
+ response = rpcProxy.listPathBasedCacheEntries(null, req);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
- return new BatchedPathCacheEntries(response);
+ return new BatchedPathBasedCacheEntries(response);
}
@Override
- public Long elementToPrevKey(PathCacheEntry element) {
+ public Long elementToPrevKey(PathBasedCacheEntry element) {
return element.getEntryId();
}
}
@Override
- public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
- String pool) throws IOException {
- return new PathCacheEntriesIterator(prevId, pool);
+ public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
+ String pool, String path) throws IOException {
+ return new PathBasedCacheEntriesIterator(prevId, pool, path);
}
@Override
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java Thu Sep 12 03:55:10 2013
@@ -35,16 +35,17 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.InvalidPoolNameError;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.UnexpectedAddPathCacheDirectiveException;
-import org.apache.hadoop.hdfs.protocol.AddPathCacheDirectiveException.PoolWritePermissionDeniedError;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.InvalidIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.NoSuchIdException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.UnexpectedRemovePathCacheEntryException;
-import org.apache.hadoop.hdfs.protocol.RemovePathCacheEntryException.RemovePermissionDeniedException;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.UnexpectedAddPathBasedCacheDirectiveException;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.UnexpectedRemovePathBasedCacheEntryException;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
import org.apache.hadoop.util.Fallible;
/**
@@ -56,17 +57,17 @@ final class CacheManager {
/**
* Cache entries, sorted by ID.
*
- * listPathCacheEntries relies on the ordering of elements in this map
+ * listPathBasedCacheEntries relies on the ordering of elements in this map
* to track what has already been listed by the client.
*/
- private final TreeMap<Long, PathCacheEntry> entriesById =
- new TreeMap<Long, PathCacheEntry>();
+ private final TreeMap<Long, PathBasedCacheEntry> entriesById =
+ new TreeMap<Long, PathBasedCacheEntry>();
/**
* Cache entries, sorted by directive.
*/
- private final TreeMap<PathCacheDirective, PathCacheEntry> entriesByDirective =
- new TreeMap<PathCacheDirective, PathCacheEntry>();
+ private final TreeMap<PathBasedCacheDirective, PathBasedCacheEntry> entriesByDirective =
+ new TreeMap<PathBasedCacheDirective, PathBasedCacheEntry>();
/**
* Cache pools, sorted by name.
@@ -114,53 +115,53 @@ final class CacheManager {
return nextEntryId++;
}
- private synchronized Fallible<PathCacheEntry> addDirective(
- PathCacheDirective directive, FSPermissionChecker pc) {
+ private synchronized Fallible<PathBasedCacheEntry> addDirective(
+ PathBasedCacheDirective directive, FSPermissionChecker pc) {
CachePool pool = cachePools.get(directive.getPool());
if (pool == null) {
LOG.info("addDirective " + directive + ": pool not found.");
- return new Fallible<PathCacheEntry>(
+ return new Fallible<PathBasedCacheEntry>(
new InvalidPoolNameError(directive));
}
if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
LOG.info("addDirective " + directive + ": write permission denied.");
- return new Fallible<PathCacheEntry>(
+ return new Fallible<PathBasedCacheEntry>(
new PoolWritePermissionDeniedError(directive));
}
try {
directive.validate();
} catch (IOException ioe) {
LOG.info("addDirective " + directive + ": validation failed.");
- return new Fallible<PathCacheEntry>(ioe);
+ return new Fallible<PathBasedCacheEntry>(ioe);
}
// Check if we already have this entry.
- PathCacheEntry existing = entriesByDirective.get(directive);
+ PathBasedCacheEntry existing = entriesByDirective.get(directive);
if (existing != null) {
// Entry already exists: return existing entry.
LOG.info("addDirective " + directive + ": there is an " +
"existing directive " + existing);
- return new Fallible<PathCacheEntry>(existing);
+ return new Fallible<PathBasedCacheEntry>(existing);
}
// Add a new entry with the next available ID.
- PathCacheEntry entry;
+ PathBasedCacheEntry entry;
try {
- entry = new PathCacheEntry(getNextEntryId(), directive);
+ entry = new PathBasedCacheEntry(getNextEntryId(), directive);
} catch (IOException ioe) {
- return new Fallible<PathCacheEntry>(
- new UnexpectedAddPathCacheDirectiveException(directive));
+ return new Fallible<PathBasedCacheEntry>(
+ new UnexpectedAddPathBasedCacheDirectiveException(directive));
}
LOG.info("addDirective " + directive + ": added cache directive "
+ directive);
entriesByDirective.put(directive, entry);
entriesById.put(entry.getEntryId(), entry);
- return new Fallible<PathCacheEntry>(entry);
+ return new Fallible<PathBasedCacheEntry>(entry);
}
- public synchronized List<Fallible<PathCacheEntry>> addDirectives(
- List<PathCacheDirective> directives, FSPermissionChecker pc) {
- ArrayList<Fallible<PathCacheEntry>> results =
- new ArrayList<Fallible<PathCacheEntry>>(directives.size());
- for (PathCacheDirective directive: directives) {
+ public synchronized List<Fallible<PathBasedCacheEntry>> addDirectives(
+ List<PathBasedCacheDirective> directives, FSPermissionChecker pc) {
+ ArrayList<Fallible<PathBasedCacheEntry>> results =
+ new ArrayList<Fallible<PathBasedCacheEntry>>(directives.size());
+ for (PathBasedCacheDirective directive: directives) {
results.add(addDirective(directive, pc));
}
return results;
@@ -174,7 +175,7 @@ final class CacheManager {
return new Fallible<Long>(new InvalidIdException(entryId));
}
// Find the entry.
- PathCacheEntry existing = entriesById.get(entryId);
+ PathBasedCacheEntry existing = entriesById.get(entryId);
if (existing == null) {
LOG.info("removeEntry " + entryId + ": entry not found.");
return new Fallible<Long>(new NoSuchIdException(entryId));
@@ -184,7 +185,7 @@ final class CacheManager {
LOG.info("removeEntry " + entryId + ": pool not found for directive " +
existing.getDirective());
return new Fallible<Long>(
- new UnexpectedRemovePathCacheEntryException(entryId));
+ new UnexpectedRemovePathBasedCacheEntryException(entryId));
}
if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
LOG.info("removeEntry " + entryId + ": write permission denied to " +
@@ -198,7 +199,7 @@ final class CacheManager {
LOG.warn("removeEntry " + entryId + ": failed to find existing entry " +
existing + " in entriesByDirective");
return new Fallible<Long>(
- new UnexpectedRemovePathCacheEntryException(entryId));
+ new UnexpectedRemovePathBasedCacheEntryException(entryId));
}
entriesById.remove(entryId);
return new Fallible<Long>(entryId);
@@ -214,33 +215,44 @@ final class CacheManager {
return results;
}
- public synchronized BatchedListEntries<PathCacheEntry>
- listPathCacheEntries(long prevId, String filterPool, FSPermissionChecker pc) {
+ public synchronized BatchedListEntries<PathBasedCacheEntry>
+ listPathBasedCacheEntries(long prevId, String filterPool,
+ String filterPath, FSPermissionChecker pc) throws IOException {
final int NUM_PRE_ALLOCATED_ENTRIES = 16;
- ArrayList<PathCacheEntry> replies =
- new ArrayList<PathCacheEntry>(NUM_PRE_ALLOCATED_ENTRIES);
+ if (filterPath != null) {
+ if (!DFSUtil.isValidName(filterPath)) {
+ throw new IOException("invalid path name '" + filterPath + "'");
+ }
+ }
+ ArrayList<PathBasedCacheEntry> replies =
+ new ArrayList<PathBasedCacheEntry>(NUM_PRE_ALLOCATED_ENTRIES);
int numReplies = 0;
- SortedMap<Long, PathCacheEntry> tailMap = entriesById.tailMap(prevId + 1);
- for (Entry<Long, PathCacheEntry> cur : tailMap.entrySet()) {
+ SortedMap<Long, PathBasedCacheEntry> tailMap = entriesById.tailMap(prevId + 1);
+ for (Entry<Long, PathBasedCacheEntry> cur : tailMap.entrySet()) {
if (numReplies >= maxListCacheDirectivesResponses) {
- return new BatchedListEntries<PathCacheEntry>(replies, true);
+ return new BatchedListEntries<PathBasedCacheEntry>(replies, true);
}
- PathCacheEntry curEntry = cur.getValue();
- if (!filterPool.isEmpty() &&
- !cur.getValue().getDirective().getPool().equals(filterPool)) {
+ PathBasedCacheEntry curEntry = cur.getValue();
+ PathBasedCacheDirective directive = cur.getValue().getDirective();
+ if (filterPool != null &&
+ !directive.getPool().equals(filterPool)) {
+ continue;
+ }
+ if (filterPath != null &&
+ !directive.getPath().equals(filterPath)) {
continue;
}
CachePool pool = cachePools.get(curEntry.getDirective().getPool());
if (pool == null) {
- LOG.error("invalid pool for PathCacheEntry " + curEntry);
+ LOG.error("invalid pool for PathBasedCacheEntry " + curEntry);
continue;
}
- if (pc.checkPermission(pool, FsAction.EXECUTE)) {
+ if (pc.checkPermission(pool, FsAction.READ)) {
replies.add(cur.getValue());
numReplies++;
}
}
- return new BatchedListEntries<PathCacheEntry>(replies, false);
+ return new BatchedListEntries<PathBasedCacheEntry>(replies, false);
}
/**
@@ -300,8 +312,7 @@ final class CacheManager {
}
if (info.getMode() != null) {
pool.setMode(info.getMode());
- bld.append(prefix).
- append(String.format("set mode to 0%3o", info.getMode()));
+ bld.append(prefix).append("set mode to " + info.getMode());
prefix = "; ";
}
if (info.getWeight() != null) {
@@ -334,10 +345,10 @@ final class CacheManager {
// Remove entries using this pool
// TODO: could optimize this somewhat to avoid the need to iterate
// over all entries in entriesByDirective
- Iterator<Entry<PathCacheDirective, PathCacheEntry>> iter =
+ Iterator<Entry<PathBasedCacheDirective, PathBasedCacheEntry>> iter =
entriesByDirective.entrySet().iterator();
while (iter.hasNext()) {
- Entry<PathCacheDirective, PathCacheEntry> entry = iter.next();
+ Entry<PathBasedCacheDirective, PathBasedCacheEntry> entry = iter.next();
if (entry.getKey().getPool().equals(poolName)) {
entriesById.remove(entry.getValue().getEntryId());
iter.remove();
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CachePool.java Thu Sep 12 03:55:10 2013
@@ -51,6 +51,14 @@ public final class CachePool {
@Nonnull
private String groupName;
+ /**
+ * Cache pool permissions.
+ *
+ * READ permission means that you can list the cache directives in this pool.
+ * WRITE permission means that you can add, remove, or modify cache directives
+ * in this pool.
+ * EXECUTE permission is unused.
+ */
@Nonnull
private FsPermission mode;
@@ -74,7 +82,7 @@ public final class CachePool {
}
this.groupName = ugi.getPrimaryGroupName();
} else {
- this.groupName = ownerName;
+ this.groupName = groupName;
}
this.mode = mode != null ?
new FsPermission(mode): FsPermission.getCachePoolDefault();
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Thu Sep 12 03:55:10 2013
@@ -143,8 +143,8 @@ import org.apache.hadoop.hdfs.HAUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.Block;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -6750,27 +6750,27 @@ public class FSNamesystem implements Nam
}
@SuppressWarnings("unchecked")
- List<Fallible<PathCacheEntry>> addPathCacheDirectives(
- List<PathCacheDirective> directives) throws IOException {
+ List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
+ List<PathBasedCacheDirective> directives) throws IOException {
CacheEntryWithPayload retryCacheEntry =
RetryCache.waitForCompletion(retryCache, null);
if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
- return (List<Fallible<PathCacheEntry>>) retryCacheEntry.getPayload();
+ return (List<Fallible<PathBasedCacheEntry>>) retryCacheEntry.getPayload();
}
final FSPermissionChecker pc = isPermissionEnabled ?
getPermissionChecker() : null;
boolean success = false;
- List<Fallible<PathCacheEntry>> results = null;
+ List<Fallible<PathBasedCacheEntry>> results = null;
checkOperation(OperationCategory.WRITE);
writeLock();
try {
checkOperation(OperationCategory.WRITE);
if (isInSafeMode()) {
throw new SafeModeException(
- "Cannot add path cache directive", safeMode);
+ "Cannot add PathBasedCache directive", safeMode);
}
results = cacheManager.addDirectives(directives, pc);
- //getEditLog().logAddPathCacheDirectives(results); FIXME: HDFS-5119
+ //getEditLog().logAddPathBasedCacheDirectives(results); FIXME: HDFS-5119
success = true;
} finally {
writeUnlock();
@@ -6778,7 +6778,7 @@ public class FSNamesystem implements Nam
getEditLog().logSync();
}
if (isAuditEnabled() && isExternalInvocation()) {
- logAuditEvent(success, "addPathCacheDirectives", null, null, null);
+ logAuditEvent(success, "addPathBasedCacheDirectives", null, null, null);
}
RetryCache.setState(retryCacheEntry, success, results);
}
@@ -6786,7 +6786,7 @@ public class FSNamesystem implements Nam
}
@SuppressWarnings("unchecked")
- List<Fallible<Long>> removePathCacheEntries(List<Long> ids) throws IOException {
+ List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids) throws IOException {
CacheEntryWithPayload retryCacheEntry =
RetryCache.waitForCompletion(retryCache, null);
if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
@@ -6802,15 +6802,15 @@ public class FSNamesystem implements Nam
checkOperation(OperationCategory.WRITE);
if (isInSafeMode()) {
throw new SafeModeException(
- "Cannot remove path cache directives", safeMode);
+ "Cannot remove PathBasedCache directives", safeMode);
}
results = cacheManager.removeEntries(ids, pc);
- //getEditLog().logRemovePathCacheEntries(results); FIXME: HDFS-5119
+ //getEditLog().logRemovePathBasedCacheEntries(results); FIXME: HDFS-5119
success = true;
} finally {
writeUnlock();
if (isAuditEnabled() && isExternalInvocation()) {
- logAuditEvent(success, "removePathCacheEntries", null, null, null);
+ logAuditEvent(success, "removePathBasedCacheEntries", null, null, null);
}
RetryCache.setState(retryCacheEntry, success, results);
}
@@ -6818,22 +6818,22 @@ public class FSNamesystem implements Nam
return results;
}
- BatchedListEntries<PathCacheEntry> listPathCacheEntries(long startId,
- String pool) throws IOException {
+ BatchedListEntries<PathBasedCacheEntry> listPathBasedCacheEntries(long startId,
+ String pool, String path) throws IOException {
final FSPermissionChecker pc = isPermissionEnabled ?
getPermissionChecker() : null;
- BatchedListEntries<PathCacheEntry> results;
+ BatchedListEntries<PathBasedCacheEntry> results;
checkOperation(OperationCategory.READ);
readLock();
boolean success = false;
try {
checkOperation(OperationCategory.READ);
- results = cacheManager.listPathCacheEntries(startId, pool, pc);
+ results = cacheManager.listPathBasedCacheEntries(startId, pool, path, pc);
success = true;
} finally {
readUnlock();
if (isAuditEnabled() && isExternalInvocation()) {
- logAuditEvent(success, "listPathCacheEntries", null, null, null);
+ logAuditEvent(success, "listPathBasedCacheEntries", null, null, null);
}
}
return results;
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java?rev=1522272&r1=1522271&r2=1522272&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java Thu Sep 12 03:55:10 2013
@@ -62,8 +62,8 @@ import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
-import org.apache.hadoop.hdfs.protocol.PathCacheDirective;
-import org.apache.hadoop.hdfs.protocol.PathCacheEntry;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -1211,43 +1211,47 @@ class NameNodeRpcServer implements Namen
}
@Override
- public List<Fallible<PathCacheEntry>> addPathCacheDirectives(
- List<PathCacheDirective> paths) throws IOException {
- return namesystem.addPathCacheDirectives(paths);
+ public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
+ List<PathBasedCacheDirective> paths) throws IOException {
+ return namesystem.addPathBasedCacheDirectives(paths);
}
@Override
- public List<Fallible<Long>> removePathCacheEntries(List<Long> ids)
+ public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
throws IOException {
- return namesystem.removePathCacheEntries(ids);
+ return namesystem.removePathBasedCacheEntries(ids);
}
- private class ServerSidePathCacheEntriesIterator
- extends BatchedRemoteIterator<Long, PathCacheEntry> {
+ private class ServerSidePathBasedCacheEntriesIterator
+ extends BatchedRemoteIterator<Long, PathBasedCacheEntry> {
private final String pool;
- public ServerSidePathCacheEntriesIterator(Long firstKey, String pool) {
+ private final String path;
+
+ public ServerSidePathBasedCacheEntriesIterator(Long firstKey, String pool,
+ String path) {
super(firstKey);
this.pool = pool;
+ this.path = path;
}
@Override
- public BatchedEntries<PathCacheEntry> makeRequest(
+ public BatchedEntries<PathBasedCacheEntry> makeRequest(
Long nextKey) throws IOException {
- return namesystem.listPathCacheEntries(nextKey, pool);
+ return namesystem.listPathBasedCacheEntries(nextKey, pool, path);
}
@Override
- public Long elementToPrevKey(PathCacheEntry entry) {
+ public Long elementToPrevKey(PathBasedCacheEntry entry) {
return entry.getEntryId();
}
}
@Override
- public RemoteIterator<PathCacheEntry> listPathCacheEntries(long prevId,
- String pool) throws IOException {
- return new ServerSidePathCacheEntriesIterator(prevId, pool);
+ public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
+ String pool, String path) throws IOException {
+ return new ServerSidePathBasedCacheEntriesIterator(prevId, pool, path);
}
@Override