You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by cm...@apache.org on 2013/10/19 00:15:27 UTC
svn commit: r1533651 - in
/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs: ./
src/main/java/org/apache/hadoop/hdfs/server/namenode/
src/main/java/org/apache/hadoop/hdfs/tools/
src/test/java/org/apache/hadoop/hdfs/server/namenode/ src/...
Author: cmccabe
Date: Fri Oct 18 22:15:26 2013
New Revision: 1533651
URL: http://svn.apache.org/r1533651
Log:
HDFS-5203. Concurrent clients that add a cache directive on the same path may prematurely uncache each other. (Chris Nauroth via Colin Patrick McCabe)
Modified:
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java
hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt?rev=1533651&r1=1533650&r2=1533651&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt Fri Oct 18 22:15:26 2013
@@ -104,3 +104,7 @@ HDFS-4949 (Unreleased)
HDFS-5388. Loading fsimage fails to find cache pools during namenode
startup. (Chris Nauroth via Colin Patrick McCabe)
+
+ HDFS-5203. Concurrent clients that add a cache directive on the same path
+ may prematurely uncache each other. (Chris Nauroth via Colin Patrick
+ McCabe)
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java?rev=1533651&r1=1533650&r2=1533651&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java Fri Oct 18 22:15:26 2013
@@ -266,21 +266,6 @@ public final class CacheManager {
return nextEntryId++;
}
- private PathBasedCacheEntry findEntry(PathBasedCacheDirective directive) {
- assert namesystem.hasReadOrWriteLock();
- List<PathBasedCacheEntry> existing =
- entriesByPath.get(directive.getPath().toUri().getPath());
- if (existing == null) {
- return null;
- }
- for (PathBasedCacheEntry entry : existing) {
- if (entry.getPool().getPoolName().equals(directive.getPool())) {
- return entry;
- }
- }
- return null;
- }
-
public PathBasedCacheDescriptor addDirective(
PathBasedCacheDirective directive, FSPermissionChecker pc)
throws IOException {
@@ -302,13 +287,6 @@ public final class CacheManager {
throw ioe;
}
- // Check if we already have this entry.
- PathBasedCacheEntry existing = findEntry(directive);
- if (existing != null) {
- LOG.info("addDirective " + directive + ": there is an " +
- "existing directive " + existing + " in this pool.");
- return existing.getDescriptor();
- }
// Add a new entry with the next available ID.
PathBasedCacheEntry entry;
try {
@@ -405,7 +383,7 @@ public final class CacheManager {
continue;
}
if (filterPath != null &&
- !directive.getPath().equals(filterPath)) {
+ !directive.getPath().toUri().getPath().equals(filterPath)) {
continue;
}
if (pc.checkPermission(curEntry.getPool(), FsAction.READ)) {
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1533651&r1=1533650&r2=1533651&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Fri Oct 18 22:15:26 2013
@@ -6978,7 +6978,7 @@ public class FSNamesystem implements Nam
} finally {
writeUnlock();
if (isAuditEnabled() && isExternalInvocation()) {
- logAuditEvent(success, "removePathBasedCacheDescriptors", null, null,
+ logAuditEvent(success, "removePathBasedCacheDescriptor", null, null,
null);
}
RetryCache.setState(cacheEntry, success);
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java?rev=1533651&r1=1533650&r2=1533651&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java Fri Oct 18 22:15:26 2013
@@ -207,10 +207,10 @@ public class CacheAdmin extends Configur
@Override
public String getLongUsage() {
TableListing listing = getOptionDescriptionListing();
- listing.addRow("<id>", "The id of the cache directive to remove. " +
+ listing.addRow("<id>", "The id of the cache directive to remove. " +
"You must have write permission on the pool of the " +
"directive in order to remove it. To see a list " +
- "of PathBasedCache directive IDs, use the -list command.");
+ "of PathBasedCache directive IDs, use the -listDirectives command.");
return getShortUsage() + "\n" +
"Remove a cache directive.\n\n" +
listing.toString();
@@ -253,6 +253,64 @@ public class CacheAdmin extends Configur
}
}
+ private static class RemovePathBasedCacheDirectivesCommand implements Command {
+ @Override
+ public String getName() {
+ return "-removeDirectives";
+ }
+
+ @Override
+ public String getShortUsage() {
+ return "[" + getName() + " <path>]\n";
+ }
+
+ @Override
+ public String getLongUsage() {
+ TableListing listing = getOptionDescriptionListing();
+ listing.addRow("<path>", "The path of the cache directives to remove. " +
+ "You must have write permission on the pool of the directive in order " +
+ "to remove it. To see a list of cache directives, use the " +
+ "-listDirectives command.");
+ return getShortUsage() + "\n" +
+ "Remove every cache directive with the specified path.\n\n" +
+ listing.toString();
+ }
+
+ @Override
+ public int run(Configuration conf, List<String> args) throws IOException {
+ String path = StringUtils.popOptionWithArgument("-path", args);
+ if (path == null) {
+ System.err.println("You must specify a path with -path.");
+ return 1;
+ }
+ if (!args.isEmpty()) {
+ System.err.println("Can't understand argument: " + args.get(0));
+ System.err.println("Usage is " + getShortUsage());
+ return 1;
+ }
+ DistributedFileSystem dfs = getDFS(conf);
+ RemoteIterator<PathBasedCacheDescriptor> iter =
+ dfs.listPathBasedCacheDescriptors(null, new Path(path));
+ int exitCode = 0;
+ while (iter.hasNext()) {
+ PathBasedCacheDescriptor entry = iter.next();
+ try {
+ dfs.removePathBasedCacheDescriptor(entry);
+ System.out.println("Removed PathBasedCache directive " +
+ entry.getEntryId());
+ } catch (RemovePathBasedCacheDescriptorException e) {
+ System.err.println(prettifyException(e));
+ exitCode = 2;
+ }
+ }
+ if (exitCode == 0) {
+ System.out.println("Removed every PathBasedCache directive with path " +
+ path);
+ }
+ return exitCode;
+ }
+ }
+
private static class ListPathBasedCacheDirectiveCommand implements Command {
@Override
public String getName() {
@@ -684,6 +742,7 @@ public class CacheAdmin extends Configur
private static Command[] COMMANDS = {
new AddPathBasedCacheDirectiveCommand(),
new RemovePathBasedCacheDirectiveCommand(),
+ new RemovePathBasedCacheDirectivesCommand(),
new ListPathBasedCacheDirectiveCommand(),
new AddCachePoolCommand(),
new ModifyCachePoolCommand(),
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java?rev=1533651&r1=1533650&r2=1533651&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathBasedCacheRequests.java Fri Oct 18 22:15:26 2013
@@ -344,8 +344,9 @@ public class TestPathBasedCacheRequests
PathBasedCacheDescriptor alphaD = addAsUnprivileged(alpha);
PathBasedCacheDescriptor alphaD2 = addAsUnprivileged(alpha);
- assertEquals("Expected to get the same descriptor when re-adding"
- + "an existing PathBasedCacheDirective", alphaD, alphaD2);
+ assertFalse("Expected to get unique descriptors when re-adding an "
+ + "existing PathBasedCacheDirective",
+ alphaD.getEntryId() == alphaD2.getEntryId());
PathBasedCacheDescriptor betaD = addAsUnprivileged(beta);
try {
@@ -404,11 +405,11 @@ public class TestPathBasedCacheRequests
RemoteIterator<PathBasedCacheDescriptor> iter;
iter = dfs.listPathBasedCacheDescriptors(null, null);
- validateListAll(iter, alphaD, betaD, deltaD, relativeD);
+ validateListAll(iter, alphaD, alphaD2, betaD, deltaD, relativeD);
iter = dfs.listPathBasedCacheDescriptors("pool3", null);
Assert.assertFalse(iter.hasNext());
iter = dfs.listPathBasedCacheDescriptors("pool1", null);
- validateListAll(iter, alphaD, deltaD, relativeD);
+ validateListAll(iter, alphaD, alphaD2, deltaD, relativeD);
iter = dfs.listPathBasedCacheDescriptors("pool2", null);
validateListAll(iter, betaD);
@@ -437,6 +438,7 @@ public class TestPathBasedCacheRequests
}
dfs.removePathBasedCacheDescriptor(alphaD);
+ dfs.removePathBasedCacheDescriptor(alphaD2);
dfs.removePathBasedCacheDescriptor(deltaD);
dfs.removePathBasedCacheDescriptor(relativeD);
iter = dfs.listPathBasedCacheDescriptors(null, null);
@@ -652,6 +654,7 @@ public class TestPathBasedCacheRequests
@Test(timeout=120000)
public void testAddingPathBasedCacheDirectivesWhenCachingIsDisabled()
throws Exception {
+ Assume.assumeTrue(canTestDatanodeCaching());
HdfsConfiguration conf = createCachingConf();
conf.setBoolean(DFS_NAMENODE_CACHING_ENABLED_KEY, false);
MiniDFSCluster cluster =
Modified: hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml?rev=1533651&r1=1533650&r2=1533651&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml Fri Oct 18 22:15:26 2013
@@ -212,5 +212,151 @@
</comparators>
</test>
+ <test> <!--Tested -->
+ <description>Testing listing directives filtered by pool</description>
+ <test-commands>
+ <cache-admin-command>-addPool pool1</cache-admin-command>
+ <cache-admin-command>-addPool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /baz -pool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /buz -pool pool2</cache-admin-command>
+ <cache-admin-command>-listDirectives -pool pool2</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool pool1</cache-admin-command>
+ <cache-admin-command>-removePool pool2</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 2 entries</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>8 pool2 /baz</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>9 pool2 /buz</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing listing directives filtered by path</description>
+ <test-commands>
+ <cache-admin-command>-addPool pool1</cache-admin-command>
+ <cache-admin-command>-addPool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool2</cache-admin-command>
+ <cache-admin-command>-listDirectives -path /foo</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool pool1</cache-admin-command>
+ <cache-admin-command>-removePool pool2</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 2 entries</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>10 pool1 /foo</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>12 pool2 /foo</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing listing directives filtered by path and pool</description>
+ <test-commands>
+ <cache-admin-command>-addPool pool1</cache-admin-command>
+ <cache-admin-command>-addPool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool2</cache-admin-command>
+ <cache-admin-command>-listDirectives -path /foo -pool pool2</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool pool1</cache-admin-command>
+ <cache-admin-command>-removePool pool2</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 1 entry</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>16 pool2 /foo</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing removing a directive</description>
+ <test-commands>
+ <cache-admin-command>-addPool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool1</cache-admin-command>
+ <cache-admin-command>-removeDirective 18</cache-admin-command>
+ <cache-admin-command>-listDirectives</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool pool1</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 1 entry</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>19 pool1 /bar</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!--Tested -->
+ <description>Testing removing every directive for a path</description>
+ <test-commands>
+ <cache-admin-command>-addPool pool1</cache-admin-command>
+ <cache-admin-command>-addPool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool1</cache-admin-command>
+ <cache-admin-command>-addDirective -path /foo -pool pool2</cache-admin-command>
+ <cache-admin-command>-addDirective -path /bar -pool pool2</cache-admin-command>
+ <cache-admin-command>-removeDirectives -path /foo</cache-admin-command>
+ <cache-admin-command>-listDirectives</cache-admin-command>
+ </test-commands>
+ <cleanup-commands>
+ <cache-admin-command>-removePool pool1</cache-admin-command>
+ <cache-admin-command>-removePool pool2</cache-admin-command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>Found 2 entries</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>22 pool1 /bar</expected-output>
+ </comparator>
+ <comparator>
+ <type>SubstringComparator</type>
+ <expected-output>24 pool2 /bar</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
</tests>
</configuration>