Posted to common-commits@hadoop.apache.org by ta...@apache.org on 2019/03/26 18:30:30 UTC

[hadoop] branch branch-3.0 updated: HDFS-14037. Fix SSLFactory truststore reloader thread leak in URLConnectionFactory.

This is an automated email from the ASF dual-hosted git repository.

tasanuma pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new abb06c9  HDFS-14037. Fix SSLFactory truststore reloader thread leak in URLConnectionFactory.
abb06c9 is described below

commit abb06c9d7c5e201ae62eec1885dd95bd6973ebcb
Author: Takanobu Asanuma <ta...@apache.org>
AuthorDate: Wed Mar 27 03:27:02 2019 +0900

    HDFS-14037. Fix SSLFactory truststore reloader thread leak in URLConnectionFactory.
    
    (cherry picked from commit 55fb3c32fb48ca26a629d4d5f3f07e2858d09594)
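
Background, for context (not part of the commit message): SSLFactory.init()
starts a "Truststore reloader thread" when a reloadable truststore is
configured. Before this patch, URLConnectionFactory built its SSL
ConnectionConfigurator as an anonymous class that captured only the socket
factory and hostname verifier and dropped the SSLFactory reference, so
SSLFactory.destroy() could never be called and the reloader thread outlived
every WebHDFS client. A minimal sketch of the pre-patch pattern, mirroring
the removed newSslConnConfigurator in the diff below:

    // Sketch of the pre-patch leak, assuming a CLIENT-mode SSLFactory with
    // a reloadable truststore configured (ssl-client.xml):
    SSLFactory factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
    factory.init();                  // spawns "Truststore reloader thread"
    SSLSocketFactory sf = factory.createSSLSocketFactory();
    HostnameVerifier hv = factory.getHostnameVerifier();
    // Only 'sf' and 'hv' were captured by the anonymous
    // ConnectionConfigurator; 'factory' was discarded, so factory.destroy(),
    // the only way to stop the reloader thread, was unreachable.
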
---
 .../hadoop/hdfs/web/SSLConnectionConfigurator.java | 72 ++++++++++++++++++++++
 .../hadoop/hdfs/web/URLConnectionFactory.java      | 43 +++----------
 .../apache/hadoop/hdfs/web/WebHdfsFileSystem.java  |  3 +
 .../hadoop/hdfs/web/TestURLConnectionFactory.java  | 53 ++++++++++++++++
 .../federation/router/RouterWebHdfsMethods.java    |  2 +
 .../hdfs/qjournal/client/QuorumJournalManager.java |  1 +
 6 files changed, 139 insertions(+), 35 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java
new file mode 100644
index 0000000..7bf7ae1
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
+import org.apache.hadoop.security.ssl.SSLFactory;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.security.GeneralSecurityException;
+
+/**
+ * Configure a connection to use SSL authentication.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+
+public class SSLConnectionConfigurator implements ConnectionConfigurator {
+  private final SSLFactory factory;
+  private final SSLSocketFactory sf;
+  private final HostnameVerifier hv;
+  private final int connectTimeout;
+  private final int readTimeout;
+
+  SSLConnectionConfigurator(int connectTimeout, int readTimeout,
+      Configuration conf) throws IOException, GeneralSecurityException {
+    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+    factory.init();
+    sf = factory.createSSLSocketFactory();
+    hv = factory.getHostnameVerifier();
+    this.connectTimeout = connectTimeout;
+    this.readTimeout = readTimeout;
+  }
+
+  @Override
+  public HttpURLConnection configure(HttpURLConnection conn) {
+    if (conn instanceof HttpsURLConnection) {
+      HttpsURLConnection c = (HttpsURLConnection) conn;
+      c.setSSLSocketFactory(sf);
+      c.setHostnameVerifier(hv);
+    }
+    conn.setConnectTimeout(connectTimeout);
+    conn.setReadTimeout(readTimeout);
+    return conn;
+  }
+
+  void destroy() {
+    factory.destroy();
+  }
+}
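
Note that the constructor and destroy() above are package-private: the
reloader lifecycle is managed only through URLConnectionFactory, whose new
public destroy() (next diff) delegates to this class.
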
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
index 9713932..8b6c7f7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
@@ -22,11 +22,6 @@ import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLConnection;
-import java.security.GeneralSecurityException;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.SSLSocketFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -36,7 +31,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
-import org.apache.hadoop.security.ssl.SSLFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -103,7 +97,7 @@ public class URLConnectionFactory {
       final int connectTimeout, final int readTimeout, Configuration conf) {
     ConnectionConfigurator conn;
     try {
-      conn = newSslConnConfigurator(connectTimeout, readTimeout, conf);
+      conn = new SSLConnectionConfigurator(connectTimeout, readTimeout, conf);
     } catch (Exception e) {
       LOG.warn(
           "Cannot load customized ssl related configuration. Fallback to" +
@@ -139,7 +133,7 @@ public class URLConnectionFactory {
     ConnectionConfigurator conn;
     try {
       ConnectionConfigurator sslConnConfigurator
-          = newSslConnConfigurator(connectTimeout, readTimeout, conf);
+          = new SSLConnectionConfigurator(connectTimeout, readTimeout, conf);
 
       conn = new OAuth2ConnectionConfigurator(conf, sslConnConfigurator);
     } catch (Exception e) {
@@ -153,33 +147,6 @@ public class URLConnectionFactory {
     this.connConfigurator = connConfigurator;
   }
 
-  private static ConnectionConfigurator newSslConnConfigurator(
-      final int connectTimeout, final int readTimeout, Configuration conf)
-      throws IOException, GeneralSecurityException {
-    final SSLFactory factory;
-    final SSLSocketFactory sf;
-    final HostnameVerifier hv;
-
-    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
-    factory.init();
-    sf = factory.createSSLSocketFactory();
-    hv = factory.getHostnameVerifier();
-
-    return new ConnectionConfigurator() {
-      @Override
-      public HttpURLConnection configure(HttpURLConnection conn)
-          throws IOException {
-        if (conn instanceof HttpsURLConnection) {
-          HttpsURLConnection c = (HttpsURLConnection) conn;
-          c.setSSLSocketFactory(sf);
-          c.setHostnameVerifier(hv);
-        }
-        URLConnectionFactory.setTimeouts(conn, connectTimeout, readTimeout);
-        return conn;
-      }
-    };
-  }
-
   /**
    * Opens a url with read and connect timeouts
    *
@@ -242,4 +209,10 @@ public class URLConnectionFactory {
     connection.setConnectTimeout(connectTimeout);
     connection.setReadTimeout(readTimeout);
   }
+
+  public void destroy() {
+    if (connConfigurator instanceof SSLConnectionConfigurator) {
+      ((SSLConnectionConfigurator) connConfigurator).destroy();
+    }
+  }
 }
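
With the public URLConnectionFactory.destroy() added above, each caller can
release the reloader thread when it is done, as the WebHdfsFileSystem,
RouterWebHdfsMethods, and QuorumJournalManager hunks below do. A minimal
usage sketch (hypothetical caller; factory methods as declared in
URLConnectionFactory):

    // Pair every factory with a destroy() so the truststore reloader
    // thread, if one was started, is stopped.
    URLConnectionFactory connectionFactory =
        URLConnectionFactory.newDefaultURLConnectionFactory(conf);
    try {
      HttpURLConnection conn =
          (HttpURLConnection) connectionFactory.openConnection(url);
      // ... issue the request and read the response ...
    } finally {
      // No-op unless connConfigurator is an SSLConnectionConfigurator.
      connectionFactory.destroy();
    }
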
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index d500b9f..ecee2e5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -1485,6 +1485,9 @@ public class WebHdfsFileSystem extends FileSystem
     } catch (IOException ioe) {
       LOG.debug("Token cancel failed: ", ioe);
     } finally {
+      if (connectionFactory != null) {
+        connectionFactory.destroy();
+      }
       super.close();
     }
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java
index e028def..2be8bf4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java
@@ -17,13 +17,17 @@
  */
 package org.apache.hadoop.hdfs.web;
 
+import java.io.File;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
@@ -64,4 +68,53 @@ public final class TestURLConnectionFactory {
         logs.getOutput().contains(
         "Cannot load customized ssl related configuration"));
   }
+
+  @Test
+  public void testSSLFactoryCleanup() throws Exception {
+    String baseDir = GenericTestUtils.getTempPath(
+        TestURLConnectionFactory.class.getSimpleName());
+    File base = new File(baseDir);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    String keystoreDir = new File(baseDir).getAbsolutePath();
+    String sslConfDir = KeyStoreTestUtil.getClasspathDir(
+        TestURLConnectionFactory.class);
+    Configuration conf = new Configuration();
+    KeyStoreTestUtil.setupSSLConfig(keystoreDir, sslConfDir, conf, false,
+        true);
+    Configuration sslConf = KeyStoreTestUtil.getSslConfig();
+
+    sslConf.set("fs.defaultFS", "swebhdfs://localhost");
+    FileSystem fs = FileSystem.get(sslConf);
+
+    ThreadGroup threadGroup = Thread.currentThread().getThreadGroup();
+
+    while (threadGroup.getParent() != null) {
+      threadGroup = threadGroup.getParent();
+    }
+
+    Thread[] threads = new Thread[threadGroup.activeCount()];
+
+    threadGroup.enumerate(threads);
+    Thread reloaderThread = null;
+    for (Thread thread : threads) {
+      if ((thread.getName() != null)
+          && (thread.getName().contains("Truststore reloader thread"))) {
+        reloaderThread = thread;
+      }
+    }
+    Assert.assertTrue("Reloader is not alive", reloaderThread.isAlive());
+
+    fs.close();
+
+    boolean reloaderStillAlive = true;
+    for (int i = 0; i < 10; i++) {
+      reloaderStillAlive = reloaderThread.isAlive();
+      if (!reloaderStillAlive) {
+        break;
+      }
+      Thread.sleep(1000);
+    }
+    Assert.assertFalse("Reloader is still alive", reloaderStillAlive);
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java
index 5e9d4d0..b40db00 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java
@@ -472,11 +472,13 @@ public class RouterWebHdfsMethods extends NamenodeWebHdfsMethods {
             (HttpURLConnection)connectionFactory.openConnection(url);
         conn.setRequestMethod(method);
 
+        connectionFactory.destroy();
         return conn;
       } catch (Exception e) {
         LOG.error("Cannot redirect request to {}", namenode, e);
       }
     }
+    connectionFactory.destroy();
     return null;
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
index 4faaa98..f06eec1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
@@ -467,6 +467,7 @@ public class QuorumJournalManager implements JournalManager {
   @Override
   public void close() throws IOException {
     loggers.close();
+    connectionFactory.destroy();
   }
 
   public void selectInputStreams(Collection<EditLogInputStream> streams,

