You are viewing a plain-text version of this content; the canonical HTML version is available from the mailing-list archive.
Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2014/11/12 04:47:26 UTC
[04/25] hadoop git commit: HDFS-7383.
DataNode.requestShortCircuitFdsForRead may throw NullPointerException.
Contributed by Tsz Wo Nicholas Sze.
HDFS-7383. DataNode.requestShortCircuitFdsForRead may throw NullPointerException. Contributed by Tsz Wo Nicholas Sze.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4ddc5cad
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4ddc5cad
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4ddc5cad
Branch: refs/heads/HDFS-EC
Commit: 4ddc5cad0a4175f7f5ef9504a7365601dc7e63b4
Parents: a37a993
Author: Suresh Srinivas <su...@yahoo-inc.com>
Authored: Sun Nov 9 17:55:03 2014 -0800
Committer: Suresh Srinivas <su...@yahoo-inc.com>
Committed: Sun Nov 9 17:55:03 2014 -0800
----------------------------------------------------------------------
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++
.../hadoop/hdfs/server/datanode/DataNode.java | 2 +-
.../hdfs/server/datanode/DatanodeUtil.java | 21 ++++++++++++++++++++
.../datanode/fsdataset/impl/FsDatasetCache.java | 4 ++--
4 files changed, 27 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ddc5cad/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 6bde9bc..af18379 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -407,6 +407,9 @@ Release 2.7.0 - UNRELEASED
HDFS-7366. BlockInfo should take replication as a short in the constructor.
(Li Lu via wheat9)
+ HDFS-7383. DataNode.requestShortCircuitFdsForRead may throw
+ NullPointerException. (szetszwo via suresh)
+
Release 2.6.0 - UNRELEASED
INCOMPATIBLE CHANGES
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ddc5cad/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 6bd27fa..adfbaf3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -1543,7 +1543,7 @@ public class DataNode extends ReconfigurableBase
try {
fis[0] = (FileInputStream)data.getBlockInputStream(blk, 0);
- fis[1] = (FileInputStream)data.getMetaDataInputStream(blk).getWrappedStream();
+ fis[1] = DatanodeUtil.getMetaDataInputStream(blk, data);
} catch (ClassCastException e) {
LOG.debug("requestShortCircuitFdsForRead failed", e);
throw new ShortCircuitFdsUnsupportedException("This DataNode's " +
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ddc5cad/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeUtil.java
index bd1ba2f..746c3f6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeUtil.java
@@ -18,10 +18,15 @@
package org.apache.hadoop.hdfs.server.datanode;
import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.LengthInputStream;
/** Provide utility methods for Datanode. */
@InterfaceAudience.Private
@@ -114,4 +119,20 @@ public class DatanodeUtil {
DataStorage.BLOCK_SUBDIR_PREFIX + d2;
return new File(root, path);
}
+
+ /**
+ * @return the FileInputStream for the meta data of the given block.
+ * @throws FileNotFoundException
+ * if the file is not found.
+ * @throws ClassCastException
+ * if the underlying input stream is not a FileInputStream.
+ */
+ public static FileInputStream getMetaDataInputStream(
+ ExtendedBlock b, FsDatasetSpi<?> data) throws IOException {
+ final LengthInputStream lin = data.getMetaDataInputStream(b);
+ if (lin == null) {
+ throw new FileNotFoundException("Meta file for " + b + " not found.");
+ }
+ return (FileInputStream)lin.getWrappedStream();
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ddc5cad/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java
index 4acfc8f..c6408e6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.ExtendedBlockId;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
+import org.apache.hadoop.hdfs.server.datanode.DatanodeUtil;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
@@ -373,8 +374,7 @@ public class FsDatasetCache {
reservedBytes = true;
try {
blockIn = (FileInputStream)dataset.getBlockInputStream(extBlk, 0);
- metaIn = (FileInputStream)dataset.getMetaDataInputStream(extBlk)
- .getWrappedStream();
+ metaIn = DatanodeUtil.getMetaDataInputStream(extBlk, dataset);
} catch (ClassCastException e) {
LOG.warn("Failed to cache " + key +
": Underlying blocks are not backed by files.", e);