You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2016/01/16 01:10:33 UTC
[07/43] hadoop git commit: HDFS-9569. Log the name of the fsimage
being loaded for better supportability. (Yongjun Zhang)
HDFS-9569. Log the name of the fsimage being loaded for better supportability. (Yongjun Zhang)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/25051c3b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/25051c3b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/25051c3b
Branch: refs/heads/HDFS-1312
Commit: 25051c3bd08efc12333a6acb51782cc7800403a4
Parents: 13de835
Author: Yongjun Zhang <yz...@cloudera.com>
Authored: Tue Jan 12 09:22:22 2016 -0800
Committer: Yongjun Zhang <yz...@cloudera.com>
Committed: Tue Jan 12 09:22:22 2016 -0800
----------------------------------------------------------------------
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ++
.../hadoop/hdfs/server/namenode/FSImage.java | 14 +++++--
.../hdfs/server/namenode/FSImageFormat.java | 21 ++++++-----
.../namenode/IllegalReservedPathException.java | 39 ++++++++++++++++++++
.../hadoop/hdfs/TestDFSUpgradeFromImage.java | 16 +++++---
5 files changed, 74 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/25051c3b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f074d90..04060bb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -2600,6 +2600,9 @@ Release 2.7.3 - UNRELEASED
HDFS-9574. Reduce client failures during datanode restart (kihwal)
+ HDFS-9569. Log the name of the fsimage being loaded for better
+ supportability. (Yongjun Zhang)
+
OPTIMIZATIONS
BUG FIXES
http://git-wip-us.apache.org/repos/asf/hadoop/blob/25051c3b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
index dedbb32..084f82a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
@@ -665,14 +665,19 @@ public class FSImage implements Closeable {
LOG.info("No edit log streams selected.");
}
+ Exception le = null;
FSImageFile imageFile = null;
for (int i = 0; i < imageFiles.size(); i++) {
try {
imageFile = imageFiles.get(i);
loadFSImageFile(target, recovery, imageFile, startOpt);
break;
- } catch (IOException ioe) {
- LOG.error("Failed to load image from " + imageFile, ioe);
+ } catch (IllegalReservedPathException ie) {
+ throw new IOException("Failed to load image from " + imageFile,
+ ie);
+ } catch (Exception e) {
+ le = e;
+ LOG.error("Failed to load image from " + imageFile, e);
target.clear();
imageFile = null;
}
@@ -680,7 +685,8 @@ public class FSImage implements Closeable {
// Failed to load any images, error out
if (imageFile == null) {
FSEditLog.closeAllStreams(editStreams);
- throw new IOException("Failed to load an FSImage file!");
+ throw new IOException("Failed to load FSImage file, see error(s) " +
+ "above for more info.");
}
prog.endPhase(Phase.LOADING_FSIMAGE);
@@ -721,7 +727,7 @@ public class FSImage implements Closeable {
void loadFSImageFile(FSNamesystem target, MetaRecoveryContext recovery,
FSImageFile imageFile, StartupOption startupOption) throws IOException {
- LOG.debug("Planning to load image :\n" + imageFile);
+ LOG.info("Planning to load image: " + imageFile);
StorageDirectory sdForProperties = imageFile.sd;
storage.readProperties(sdForProperties, startupOption);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/25051c3b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
index 0b1902f..8c05a2d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
@@ -669,7 +669,8 @@ public class FSImageFormat {
* This method is only used for image loading so that synchronization,
* modification time update and space count update are not needed.
*/
- private void addToParent(INodeDirectory parent, INode child) {
+ private void addToParent(INodeDirectory parent, INode child)
+ throws IllegalReservedPathException {
FSDirectory fsDir = namesystem.dir;
if (parent == fsDir.rootDir) {
child.setLocalName(renameReservedRootComponentOnUpgrade(
@@ -1097,7 +1098,7 @@ public class FSImageFormat {
* @return New path with reserved path components renamed to user value
*/
static String renameReservedPathsOnUpgrade(String path,
- final int layoutVersion) {
+ final int layoutVersion) throws IllegalReservedPathException {
final String oldPath = path;
// If any known LVs aren't supported, we're doing an upgrade
if (!NameNodeLayoutVersion.supports(Feature.ADD_INODE_ID, layoutVersion)) {
@@ -1147,13 +1148,13 @@ public class FSImageFormat {
* byte array path component.
*/
private static byte[] renameReservedComponentOnUpgrade(byte[] component,
- final int layoutVersion) {
+ final int layoutVersion) throws IllegalReservedPathException {
// If the LV doesn't support snapshots, we're doing an upgrade
if (!NameNodeLayoutVersion.supports(Feature.SNAPSHOT, layoutVersion)) {
if (Arrays.equals(component, HdfsServerConstants.DOT_SNAPSHOT_DIR_BYTES)) {
- Preconditions.checkArgument(
- renameReservedMap.containsKey(HdfsConstants.DOT_SNAPSHOT_DIR),
- RESERVED_ERROR_MSG);
+ if (!renameReservedMap.containsKey(HdfsConstants.DOT_SNAPSHOT_DIR)) {
+ throw new IllegalReservedPathException(RESERVED_ERROR_MSG);
+ }
component =
DFSUtil.string2Bytes(renameReservedMap
.get(HdfsConstants.DOT_SNAPSHOT_DIR));
@@ -1167,13 +1168,13 @@ public class FSImageFormat {
* byte array path component.
*/
private static byte[] renameReservedRootComponentOnUpgrade(byte[] component,
- final int layoutVersion) {
+ final int layoutVersion) throws IllegalReservedPathException {
// If the LV doesn't support inode IDs, we're doing an upgrade
if (!NameNodeLayoutVersion.supports(Feature.ADD_INODE_ID, layoutVersion)) {
if (Arrays.equals(component, FSDirectory.DOT_RESERVED)) {
- Preconditions.checkArgument(
- renameReservedMap.containsKey(FSDirectory.DOT_RESERVED_STRING),
- RESERVED_ERROR_MSG);
- if (!renameReservedMap.containsKey(FSDirectory.DOT_RESERVED_STRING)) {
+ throw new IllegalReservedPathException(RESERVED_ERROR_MSG);
+ }
final String renameString = renameReservedMap
.get(FSDirectory.DOT_RESERVED_STRING);
component =
http://git-wip-us.apache.org/repos/asf/hadoop/blob/25051c3b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/IllegalReservedPathException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/IllegalReservedPathException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/IllegalReservedPathException.java
new file mode 100644
index 0000000..69917e9
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/IllegalReservedPathException.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Thrown during an upgrade from a software release that does not support
+ * reserved paths to one that does, when the fsimage being loaded contains
+ * a path name that is reserved in the newer release.
+ */
+@InterfaceAudience.Private
+public class IllegalReservedPathException extends IOException {
+ private static final long serialVersionUID = 1L;
+
+ public IllegalReservedPathException(String message, Throwable cause) {
+ super(message, cause);
+ }
+ public IllegalReservedPathException(String message) {
+ super(message);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/25051c3b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java
index 3b3ff93..1ba36f3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.namenode.FSImage;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormat;
import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
+import org.apache.hadoop.hdfs.server.namenode.IllegalReservedPathException;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Logger;
@@ -326,7 +327,7 @@ public class TestDFSUpgradeFromImage {
fail("Upgrade did not fail with bad MD5");
} catch (IOException ioe) {
String msg = StringUtils.stringifyException(ioe);
- if (!msg.contains("Failed to load an FSImage file")) {
+ if (!msg.contains("Failed to load FSImage file")) {
throw ioe;
}
int md5failures = appender.countExceptionsWithMessage(
@@ -485,10 +486,15 @@ public class TestDFSUpgradeFromImage {
.format(false)
.startupOption(StartupOption.UPGRADE)
.numDataNodes(0).build();
- } catch (IllegalArgumentException e) {
- GenericTestUtils.assertExceptionContains(
- "reserved path component in this version",
- e);
+ } catch (IOException ioe) {
+ Throwable cause = ioe.getCause();
+ if (cause instanceof IllegalReservedPathException) {
+ GenericTestUtils.assertExceptionContains(
+ "reserved path component in this version",
+ cause);
+ } else {
+ throw ioe;
+ }
} finally {
if (cluster != null) {
cluster.shutdown();