You are viewing a plain text version of this content; the canonical (hyperlinked) version is available in the mailing-list archive.
Posted to hdfs-commits@hadoop.apache.org by sh...@apache.org on 2011/01/13 04:41:32 UTC
svn commit: r1058402 - in /hadoop/hdfs/trunk: CHANGES.txt
src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeConfig.java
Author: shv
Date: Thu Jan 13 03:41:32 2011
New Revision: 1058402
URL: http://svn.apache.org/viewvc?rev=1058402&view=rev
Log:
HDFS-884. DataNode throws IOException if all data directories are unavailable. Contributed by Steve Loughran and Konstantin Shvachko.
Modified:
hadoop/hdfs/trunk/CHANGES.txt
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeConfig.java
Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=1058402&r1=1058401&r2=1058402&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Thu Jan 13 03:41:32 2011
@@ -485,6 +485,9 @@ Release 0.22.0 - Unreleased
HDFS-1504. FSImageSaver should catch all exceptions, not just IOE. (todd)
+ HDFS-884. DataNode throws IOException if all data directories are
+ unavailable. (Steve Loughran and shv)
+
Release 0.21.1 - Unreleased
IMPROVEMENTS
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1058402&r1=1058401&r2=1058402&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Thu Jan 13 03:41:32 2011
@@ -1571,21 +1571,19 @@ public class DataNode extends Configured
DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_DEFAULT));
ArrayList<File> dirs = getDataDirsFromURIs(dataDirs, localFS, permission);
- if (dirs.size() > 0) {
- return new DataNode(conf, dirs, resources);
- }
- LOG.error("All directories in "
- + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY + " are invalid.");
- return null;
+ assert dirs.size() > 0 : "number of data directories should be > 0";
+ return new DataNode(conf, dirs, resources);
}
// DataNode ctor expects AbstractList instead of List or Collection...
static ArrayList<File> getDataDirsFromURIs(Collection<URI> dataDirs,
- LocalFileSystem localFS, FsPermission permission) {
+ LocalFileSystem localFS, FsPermission permission) throws IOException {
ArrayList<File> dirs = new ArrayList<File>();
+ StringBuilder invalidDirs = new StringBuilder();
for (URI dirURI : dataDirs) {
if (!"file".equalsIgnoreCase(dirURI.getScheme())) {
LOG.warn("Unsupported URI schema in " + dirURI + ". Ignoring ...");
+ invalidDirs.append("\"").append(dirURI).append("\" ");
continue;
}
// drop any (illegal) authority in the URI for backwards compatibility
@@ -1595,10 +1593,14 @@ public class DataNode extends Configured
dirs.add(data);
} catch (IOException e) {
LOG.warn("Invalid directory in: "
- + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY + ": "
- + e.getMessage());
+ + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY + ": ", e);
+ invalidDirs.append("\"").append(data.getCanonicalPath()).append("\" ");
}
}
+ if (dirs.size() == 0)
+ throw new IOException("All directories in "
+ + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY + " are invalid: "
+ + invalidDirs);
return dirs;
}
Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeConfig.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeConfig.java?rev=1058402&r1=1058401&r2=1058402&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeConfig.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeConfig.java Thu Jan 13 03:41:32 2011
@@ -76,7 +76,14 @@ public class TestDatanodeConfig {
// 1. Test unsupported schema. Only "file:" is supported.
String dnDir = makeURI("shv", null, fileAsURI(dataDir).getPath());
conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, dnDir);
- DataNode dn = DataNode.createDataNode(new String[]{}, conf);
+ DataNode dn = null;
+ try {
+ dn = DataNode.createDataNode(new String[]{}, conf);
+ } catch(IOException e) {
+ // expecting exception here
+ }
+ if(dn != null)
+ dn.shutdown();
assertNull("Data-node startup should have failed.", dn);
// 2. Test "file:" schema and no schema (path-only). Both should work.