Posted to common-commits@hadoop.apache.org by ta...@apache.org on 2021/06/17 01:01:19 UTC

[hadoop] branch branch-3.3 updated: HADOOP-17760. Delete hadoop.ssl.enabled and dfs.https.enable from docs and core-default.xml (#3099)

This is an automated email from the ASF dual-hosted git repository.

tasanuma pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 25138c9  HADOOP-17760. Delete hadoop.ssl.enabled and dfs.https.enable from docs and core-default.xml (#3099)
25138c9 is described below

commit 25138c98bffccea833adbabac10baf2494ea084f
Author: Takanobu Asanuma <ta...@apache.org>
AuthorDate: Thu Jun 17 09:58:47 2021 +0900

    HADOOP-17760. Delete hadoop.ssl.enabled and dfs.https.enable from docs and core-default.xml (#3099)
    
    Reviewed-by: Ayush Saxena <ay...@apache.org>
    (cherry picked from commit 9e7c7ad129fcf466d9647e0672ecf7dd72213e72)
---
 .../hadoop-common/src/main/resources/core-default.xml             | 8 --------
 .../hadoop-common/src/site/markdown/SecureMode.md                 | 3 +--
 .../org/apache/hadoop/conf/TestCommonConfigurationFields.java     | 2 --
 hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json     | 2 --
 4 files changed, 1 insertion(+), 14 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 552e28e..8009984 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -3078,14 +3078,6 @@
 </property>
 
 <property>
-  <name>hadoop.ssl.enabled</name>
-  <value>false</value>
-  <description>
-    Deprecated. Use dfs.http.policy and yarn.http.policy instead.
-  </description>
-</property>
-
-<property>
   <name>hadoop.ssl.enabled.protocols</name>
   <value>TLSv1.2</value>
   <description>
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md b/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
index 856861f..c360277 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
@@ -267,9 +267,8 @@ The following settings allow configuring SSL access to the NameNode web UI (opti
 
 | Parameter                    | Value                                           | Notes                                                                                                                                                                                                                                                                                                                                                                                              |
 |:-----------------------------|:------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `dfs.http.policy`            | `HTTP_ONLY` or `HTTPS_ONLY` or `HTTP_AND_HTTPS` | `HTTPS_ONLY` turns off http access. This option takes precedence over the deprecated configuration dfs.https.enable and hadoop.ssl.enabled. If using SASL to authenticate data transfer protocol instead of running DataNode as root and using privileged ports, then this property must be set to `HTTPS_ONLY` to guarantee authentication of HTTP servers. (See `dfs.data.transfer.protection`.) |
+| `dfs.http.policy`            | `HTTP_ONLY` or `HTTPS_ONLY` or `HTTP_AND_HTTPS` | `HTTPS_ONLY` turns off http access. If using SASL to authenticate data transfer protocol instead of running DataNode as root and using privileged ports, then this property must be set to `HTTPS_ONLY` to guarantee authentication of HTTP servers. (See `dfs.data.transfer.protection`.) |
 | `dfs.namenode.https-address` | `0.0.0.0:9871`                                 | This parameter is used in non-HA mode and without federation. See [HDFS High Availability](../hadoop-hdfs/HDFSHighAvailabilityWithNFS.html#Deployment) and [HDFS Federation](../hadoop-hdfs/Federation.html#Federation_Configuration) for details.                                                                                                                                                 |
-| `dfs.https.enable`           | `true`                                          | This value is deprecated. `Use dfs.http.policy`                                                                                                                                                                                                                                                                                                                                                    |
 
 ### Secondary NameNode
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
index dd9f41a..8a6d0c4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
@@ -236,8 +236,6 @@ public class TestCommonConfigurationFields extends TestConfigurationFieldsBase {
     // - org.apache.hadoop.net.NetUtils
     xmlPropsToSkipCompare
         .add("hadoop.rpc.socket.factory.class.ClientProtocol");
-    // - Where is this used?
-    xmlPropsToSkipCompare.add("hadoop.ssl.enabled");
 
     // Keys with no corresponding variable
     // - org.apache.hadoop.io.compress.bzip2.Bzip2Factory
diff --git a/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json b/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
index 8c2e43e..8e9af75 100644
--- a/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
+++ b/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
@@ -4559,7 +4559,6 @@
     "hadoop.hdfs.configuration.version" : "1",
     "dfs.datanode.balance.bandwidthPerSec" : "1048576",
     "mapreduce.reduce.shuffle.connect.timeout" : "180000",
-    "hadoop.ssl.enabled" : "false",
     "dfs.journalnode.rpc-address" : "0.0.0.0:8485",
     "yarn.nodemanager.aux-services" : "mapreduce.shuffle",
     "mapreduce.job.counters.max" : "120",
@@ -9626,7 +9625,6 @@
     "hadoop.hdfs.configuration.version" : "1",
     "dfs.datanode.balance.bandwidthPerSec" : "1048576",
     "mapreduce.reduce.shuffle.connect.timeout" : "180000",
-    "hadoop.ssl.enabled" : "false",
     "dfs.journalnode.rpc-address" : "0.0.0.0:8485",
     "yarn.nodemanager.aux-services" : "mapreduce.shuffle",
     "mapreduce.job.counters.max" : "120",

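For anyone still carrying the removed keys in local configs, the replacement is the HTTP policy settings referenced in the SecureMode.md table above. A minimal illustrative sketch for hdfs-site.xml follows (the HTTPS_ONLY value is an example choice, not a default; dfs.namenode.https-address is shown with the non-HA, non-federated value from the table):

<!-- hdfs-site.xml: replaces the deprecated dfs.https.enable / hadoop.ssl.enabled -->
<property>
  <name>dfs.http.policy</name>
  <!-- one of HTTP_ONLY, HTTPS_ONLY, HTTP_AND_HTTPS; HTTPS_ONLY turns off http access -->
  <value>HTTPS_ONLY</value>
</property>
<property>
  <name>dfs.namenode.https-address</name>
  <value>0.0.0.0:9871</value>
</property>

As the removed core-default.xml description notes, the YARN daemons use the analogous yarn.http.policy key for the same purpose.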