Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2014/11/09 07:28:45 UTC

hadoop git commit: HDFS-7382. DataNode in secure mode may throw NullPointerException if client connects before DataNode registers itself with NameNode. Contributed by Chris Nauroth.

Repository: hadoop
Updated Branches:
  refs/heads/trunk 6caa8100d -> 9ba8d8c7e


HDFS-7382. DataNode in secure mode may throw NullPointerException if client connects before DataNode registers itself with NameNode. Contributed by Chris Nauroth.
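
Background on the failure mode, as suggested by the diff below: receive() previously
called datanodeId.getXferPort() to decide whether the connection arrived on a
privileged port. If a client connects before the DataNode has registered itself with
the NameNode, that DatanodeID may not yet be usable, and dereferencing it for the
port can throw a NullPointerException. The patch instead has DataXceiver pass the
transfer port it already knows locally (datanode.getXferAddress().getPort()), so the
port check no longer touches the DatanodeID at all. A minimal, self-contained sketch
of the pattern (hypothetical stand-in names, not the actual Hadoop classes):

    // PortCheckSketch.java -- simplified illustration of the before/after pattern.
    public class PortCheckSketch {

      // Stand-in for DatanodeID; it may be absent until registration completes.
      static final class NodeId {
        private final int xferPort;
        NodeId(int xferPort) { this.xferPort = xferPort; }
        int getXferPort() { return xferPort; }
      }

      // Pre-fix style: reads the port from the (possibly missing) node ID.
      static boolean onPrivilegedPort(NodeId id) {
        return id.getXferPort() < 1024;   // NullPointerException if id is null
      }

      // Post-fix style: the caller supplies the port it already knows, so an
      // unset node ID can no longer break the check.
      static boolean onPrivilegedPort(int xferPort) {
        return xferPort < 1024;
      }

      public static void main(String[] args) {
        NodeId registered = new NodeId(1004);
        System.out.println(onPrivilegedPort(registered));     // true, after registration
        System.out.println(onPrivilegedPort(1004));           // true, works either way

        NodeId unregistered = null;   // client connected before registration
        try {
          onPrivilegedPort(unregistered);                     // reproduces the NPE
        } catch (NullPointerException e) {
          System.out.println("NPE when the port is read from a missing DatanodeID");
        }
      }
    }

In the actual patch the same idea appears as the new int xferPort parameter on
receive() and the matching call in DataXceiver below; getSaslStreams() also drops
its now-unused DatanodeID parameter.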


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9ba8d8c7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9ba8d8c7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9ba8d8c7

Branch: refs/heads/trunk
Commit: 9ba8d8c7eb65eeb6fe673f04e493d9eedd95a822
Parents: 6caa810
Author: cnauroth <cn...@apache.org>
Authored: Sat Nov 8 22:24:57 2014 -0800
Committer: cnauroth <cn...@apache.org>
Committed: Sat Nov 8 22:24:57 2014 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt            |  3 +++
 .../datatransfer/sasl/SaslDataTransferServer.java      | 13 +++++++------
 .../hadoop/hdfs/server/datanode/DataXceiver.java       |  3 ++-
 3 files changed, 12 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ba8d8c7/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0595764..6bde9bc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1412,6 +1412,9 @@ Release 2.6.0 - UNRELEASED
 
     HDFS-7226. Fix TestDNFencing.testQueueingWithAppend. (Yongjun Zhang via jing9)
 
+    HDFS-7382. DataNode in secure mode may throw NullPointerException if client
+    connects before DataNode registers itself with NameNode. (cnauroth)
+
 Release 2.5.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ba8d8c7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
index 9f94534..3fa7727 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
@@ -94,12 +94,14 @@ public class SaslDataTransferServer {
    * @param peer connection peer
    * @param underlyingOut connection output stream
    * @param underlyingIn connection input stream
+   * @param xferPort data transfer port of DataNode accepting connection
    * @param datanodeId ID of DataNode accepting connection
    * @return new pair of streams, wrapped after SASL negotiation
    * @throws IOException for any error
    */
   public IOStreamPair receive(Peer peer, OutputStream underlyingOut,
-      InputStream underlyingIn, DatanodeID datanodeId) throws IOException {
+      InputStream underlyingIn, int xferPort, DatanodeID datanodeId)
+      throws IOException {
     if (dnConf.getEncryptDataTransfer()) {
       LOG.debug(
         "SASL server doing encrypted handshake for peer = {}, datanodeId = {}",
@@ -110,16 +112,16 @@ public class SaslDataTransferServer {
         "SASL server skipping handshake in unsecured configuration for "
         + "peer = {}, datanodeId = {}", peer, datanodeId);
       return new IOStreamPair(underlyingIn, underlyingOut);
-    } else if (datanodeId.getXferPort() < 1024) {
+    } else if (xferPort < 1024) {
       LOG.debug(
-        "SASL server skipping handshake in unsecured configuration for "
+        "SASL server skipping handshake in secured configuration for "
         + "peer = {}, datanodeId = {}", peer, datanodeId);
       return new IOStreamPair(underlyingIn, underlyingOut);
     } else if (dnConf.getSaslPropsResolver() != null) {
       LOG.debug(
         "SASL server doing general handshake for peer = {}, datanodeId = {}",
         peer, datanodeId);
-      return getSaslStreams(peer, underlyingOut, underlyingIn, datanodeId);
+      return getSaslStreams(peer, underlyingOut, underlyingIn);
     } else if (dnConf.getIgnoreSecurePortsForTesting()) {
       // It's a secured cluster using non-privileged ports, but no SASL.  The
       // only way this can happen is if the DataNode has
@@ -271,12 +273,11 @@ public class SaslDataTransferServer {
    * @param peer connection peer
    * @param underlyingOut connection output stream
    * @param underlyingIn connection input stream
-   * @param datanodeId ID of DataNode accepting connection
    * @return new pair of streams, wrapped after SASL negotiation
    * @throws IOException for any error
    */
   private IOStreamPair getSaslStreams(Peer peer, OutputStream underlyingOut,
-      InputStream underlyingIn, final DatanodeID datanodeId) throws IOException {
+      InputStream underlyingIn) throws IOException {
     if (peer.hasSecureChannel() ||
         dnConf.getTrustedChannelResolver().isTrusted(getPeerAddress(peer))) {
       return new IOStreamPair(underlyingIn, underlyingOut);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ba8d8c7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
index 2a45a42..a235c20 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
@@ -182,7 +182,8 @@ class DataXceiver extends Receiver implements Runnable {
       InputStream input = socketIn;
       try {
         IOStreamPair saslStreams = datanode.saslServer.receive(peer, socketOut,
-          socketIn, datanode.getDatanodeId());
+          socketIn, datanode.getXferAddress().getPort(),
+          datanode.getDatanodeId());
         input = new BufferedInputStream(saslStreams.in,
           HdfsConstants.SMALL_BUFFER_SIZE);
         socketOut = saslStreams.out;