You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by xy...@apache.org on 2018/09/20 22:22:06 UTC
[44/50] [abbrv] hadoop git commit: HDDS-6. Enable SCM kerberos auth.
Contributed by Ajay Kumar.
HDDS-6. Enable SCM kerberos auth. Contributed by Ajay Kumar.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9dc72d12
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9dc72d12
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9dc72d12
Branch: refs/heads/HDDS-4
Commit: 9dc72d12181acf42ce3f807eb485356154ecda34
Parents: 524f7cd
Author: Xiaoyu Yao <xy...@apache.org>
Authored: Wed May 9 15:56:03 2018 -0700
Committer: Xiaoyu Yao <xy...@apache.org>
Committed: Thu Sep 20 15:13:06 2018 -0700
----------------------------------------------------------------------
.../authentication/util/KerberosUtil.java | 2 +-
.../conf/TestConfigurationFieldsBase.java | 2 +
.../java/org/apache/hadoop/hdds/HddsUtils.java | 13 +-
.../apache/hadoop/hdds/scm/ScmConfigKeys.java | 9 +-
.../scm/protocol/ScmBlockLocationProtocol.java | 3 +
.../StorageContainerLocationProtocol.java | 4 +
.../protocolPB/ScmBlockLocationProtocolPB.java | 6 +
.../StorageContainerLocationProtocolPB.java | 4 +
.../apache/hadoop/ozone/OzoneConfigKeys.java | 3 +
.../common/src/main/resources/ozone-default.xml | 35 +++-
.../StorageContainerDatanodeProtocol.java | 4 +
.../StorageContainerDatanodeProtocolPB.java | 6 +
.../scm/server/StorageContainerManager.java | 51 ++++-
.../StorageContainerManagerHttpServer.java | 5 +-
.../ozone/client/protocol/ClientProtocol.java | 3 +
hadoop-ozone/common/src/main/bin/start-ozone.sh | 7 +
hadoop-ozone/common/src/main/bin/stop-ozone.sh | 13 +-
hadoop-ozone/integration-test/pom.xml | 6 +
.../hadoop/ozone/MiniOzoneClusterImpl.java | 17 +-
.../hadoop/ozone/TestSecureOzoneCluster.java | 205 +++++++++++++++++++
.../ozone/TestStorageContainerManager.java | 5 +-
21 files changed, 366 insertions(+), 37 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
index c011045..4459928 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
@@ -167,7 +167,7 @@ public class KerberosUtil {
}
/* Return fqdn of the current host */
- static String getLocalHostName() throws UnknownHostException {
+ public static String getLocalHostName() throws UnknownHostException {
return InetAddress.getLocalHost().getCanonicalHostName();
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
index 152159b..bce1cd5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
@@ -436,6 +436,8 @@ public abstract class TestConfigurationFieldsBase {
// Create XML key/value map
LOG_XML.debug("Reading XML property files\n");
xmlKeyValueMap = extractPropertiesFromXml(xmlFilename);
+ // Remove hadoop property set in ozone-default.xml
+ xmlKeyValueMap.remove("hadoop.custom.tags");
LOG_XML.debug("\n=====\n");
// Create default configuration variable key/value map
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
index db9d374..efafb5c 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
@@ -242,18 +242,7 @@ public final class HddsUtils {
}
public static boolean isHddsEnabled(Configuration conf) {
- String securityEnabled =
- conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
- "simple");
- boolean securityAuthorizationEnabled = conf.getBoolean(
- CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false);
-
- if (securityEnabled.equals("kerberos") || securityAuthorizationEnabled) {
- LOG.error("Ozone is not supported in a security enabled cluster. ");
- return false;
- } else {
- return conf.getBoolean(OZONE_ENABLED, OZONE_ENABLED_DEFAULT);
- }
+ return conf.getBoolean(OZONE_ENABLED, OZONE_ENABLED_DEFAULT);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
index 63f5916..e8b0930 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
@@ -158,8 +158,9 @@ public final class ScmConfigKeys {
"ozone.scm.http-address";
public static final String OZONE_SCM_HTTPS_ADDRESS_KEY =
"ozone.scm.https-address";
- public static final String OZONE_SCM_KEYTAB_FILE =
- "ozone.scm.keytab.file";
+ public static final String OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY =
+ "ozone.scm.kerberos.keytab.file";
+ public static final String OZONE_SCM_KERBEROS_PRINCIPAL_KEY = "ozone.scm.kerberos.principal";
public static final String OZONE_SCM_HTTP_BIND_HOST_DEFAULT = "0.0.0.0";
public static final int OZONE_SCM_HTTP_BIND_PORT_DEFAULT = 9876;
public static final int OZONE_SCM_HTTPS_BIND_PORT_DEFAULT = 9877;
@@ -278,6 +279,10 @@ public final class ScmConfigKeys {
public static final String HDDS_SCM_WATCHER_TIMEOUT_DEFAULT =
"10m";
+ public static final String SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY =
+ "ozone.scm.web.authentication.kerberos.principal";
+ public static final String SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY =
+ "ozone.scm.web.authentication.kerberos.keytab";
/**
* Never constructed.
*/
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
index c8d4a80..e17f1c2 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hdds.scm.protocol;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.hdds.scm.ScmInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
@@ -31,6 +33,7 @@ import java.util.List;
* ScmBlockLocationProtocol is used by an HDFS node to find the set of nodes
* to read/write a block.
*/
+@KerberosInfo(serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY)
public interface ScmBlockLocationProtocol {
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
index c55062b..454b9f3 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
@@ -17,6 +17,8 @@
package org.apache.hadoop.hdds.scm.protocol;
+import org.apache.hadoop.hdds.HddsConfigKeys;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.ScmInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
@@ -27,11 +29,13 @@ import org.apache.hadoop.hdds.protocol.proto
import java.io.IOException;
import java.util.List;
+import org.apache.hadoop.security.KerberosInfo;
/**
* ContainerLocationProtocol is used by an HDFS node to find the set of nodes
* that currently host a container.
*/
+@KerberosInfo(serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY)
public interface StorageContainerLocationProtocol {
/**
* Asks SCM where a container should be allocated. SCM responds with the
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java
index 837c95b..89bb066 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java
@@ -18,9 +18,13 @@
package org.apache.hadoop.hdds.scm.protocolPB;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.HddsConfigKeys;
import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
.ScmBlockLocationProtocolService;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.security.KerberosInfo;
/**
* Protocol used from an HDFS node to StorageContainerManager. This extends the
@@ -30,6 +34,8 @@ import org.apache.hadoop.ipc.ProtocolInfo;
"org.apache.hadoop.ozone.protocol.ScmBlockLocationProtocol",
protocolVersion = 1)
@InterfaceAudience.Private
+@KerberosInfo(
+ serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY)
public interface ScmBlockLocationProtocolPB
extends ScmBlockLocationProtocolService.BlockingInterface {
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java
index f234ad3..3bd83f9 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java
@@ -21,7 +21,9 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerLocationProtocolProtos
.StorageContainerLocationProtocolService;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.security.KerberosInfo;
/**
* Protocol used from an HDFS node to StorageContainerManager. This extends the
@@ -30,6 +32,8 @@ import org.apache.hadoop.ipc.ProtocolInfo;
@ProtocolInfo(protocolName =
"org.apache.hadoop.ozone.protocol.StorageContainerLocationProtocol",
protocolVersion = 1)
+@KerberosInfo(
+ serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY)
@InterfaceAudience.Private
public interface StorageContainerLocationProtocolPB
extends StorageContainerLocationProtocolService.BlockingInterface {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
index 599b4e8..fb83052 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
@@ -296,6 +296,9 @@ public final class OzoneConfigKeys {
"hdds.lock.suppress.warning.interval.ms";
public static final long
HDDS_LOCK_SUPPRESS_WARNING_INTERVAL_MS_DEAFULT = 10000L;
+ public static final String OZONE_SECURITY_ENABLED_KEY = "ozone.security.enabled";
+ public static final String OZONE_SYSTEM_TAGS_KEY = "ozone.system.tags";
+ public static final boolean OZONE_SECURITY_ENABLED_DEFAULT = false;
public static final String OZONE_CONTAINER_COPY_WORKDIR =
"hdds.datanode.replication.work.dir";
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/common/src/main/resources/ozone-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml
index f7681e8..65b4dc4 100644
--- a/hadoop-hdds/common/src/main/resources/ozone-default.xml
+++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml
@@ -1115,7 +1115,6 @@
to occur.
</description>
</property>
-
<property>
<name>hdds.lock.suppress.warning.interval.ms</name>
<value>10000</value>
@@ -1125,7 +1124,13 @@
consecutive warnings within this interval.
</description>
</property>
-
+ <property>
+ <name>ozone.security.enabled</name>
+ <value>false</value>
+ <tag> OZONE, SECURITY, FLAG</tag>
+ <description>True if security is enabled for ozone. When this property is true, hadoop.security.authentication should be Kerberos.
+ </description>
+ </property>
<property>
<name>hdds.command.status.report.interval</name>
<value>30s</value>
@@ -1193,7 +1198,6 @@
datanode unless the datanode confirms the completion.
</description>
</property>
-
<property>
<name>hdds.db.profile</name>
<value>SSD</value>
@@ -1202,7 +1206,6 @@
that tunes the RocksDB settings for the hardware it is running
on. Right now, we have SSD and DISK as profile options.</description>
</property>
-
<property>
<name>hdds.datanode.replication.work.dir</name>
<tag>DATANODE</tag>
@@ -1222,4 +1225,28 @@
</description>
</property>
+ <property>
+ <name>ozone.scm.kerberos.keytab.file</name>
+ <value></value>
+ <tag> OZONE, SECURITY</tag>
+ <description> The keytab file used by each SCM daemon to login as its
+ service principal. The principal name is configured with
+ ozone.scm.kerberos.principal.
+ </description>
+ </property>
+ <property>
+ <name>ozone.scm.kerberos.principal</name>
+ <value></value>
+ <tag> OZONE, SECURITY</tag>
+ <description>The SCM service principal. Ex scm/_HOST@REALM.TLD.</description>
+ </property>
+
+ <property>
+ <name>ozone.scm.web.authentication.kerberos.principal</name>
+ <value>HTTP/_HOST@EXAMPLE.COM</value>
+ </property>
+ <property>
+ <name>ozone.scm.web.authentication.kerberos.keytab</name>
+ <value>/etc/security/keytabs/HTTP.keytab</value>
+ </property>
</configuration>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java
index 9296524..b723d28 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java
@@ -41,11 +41,15 @@ import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
import java.io.IOException;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.security.KerberosInfo;
/**
* The protocol spoken between datanodes and SCM. For specifics please the
* Protoc file that defines this protocol.
*/
+@KerberosInfo(
+ serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY)
@InterfaceAudience.Private
public interface StorageContainerDatanodeProtocol {
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java
index 9b28b5a..9c32ef8 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java
@@ -19,7 +19,10 @@ package org.apache.hadoop.ozone.protocolPB;
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos
.StorageContainerDatanodeProtocolService;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.security.KerberosInfo;
/**
* Protocol used from a datanode to StorageContainerManager. This extends
@@ -29,6 +32,9 @@ import org.apache.hadoop.ipc.ProtocolInfo;
@ProtocolInfo(protocolName =
"org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol",
protocolVersion = 1)
+@KerberosInfo(
+ serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY,
+ clientPrincipal = DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY)
public interface StorageContainerDatanodeProtocolPB extends
StorageContainerDatanodeProtocolService.BlockingInterface {
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
index b2cbc93..7d1ebb4 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
@@ -28,10 +28,12 @@ import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.protobuf.BlockingService;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.HddsUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.HddsServerUtil;
import org.apache.hadoop.hdds.scm.block.BlockManager;
import org.apache.hadoop.hdds.scm.block.BlockManagerImpl;
import org.apache.hadoop.hdds.scm.block.PendingDeleteHandler;
@@ -78,6 +80,9 @@ import org.apache.hadoop.ozone.common.Storage.StorageState;
import org.apache.hadoop.ozone.common.StorageInfo;
import org.apache.hadoop.ozone.lease.LeaseManager;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.StringUtils;
@@ -100,6 +105,10 @@ import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_D
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_MB;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY;
import static org.apache.hadoop.util.ExitUtil.terminate;
/**
@@ -176,6 +185,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
private final ReplicationManager replicationManager;
private final LeaseManager<Long> commandWatcherLeaseManager;
+ private Configuration scmConf;
private final ReplicationActivityStatus replicationStatus;
private final SCMChillModeManager scmChillModeManager;
@@ -187,13 +197,19 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
*
* @param conf configuration
*/
- private StorageContainerManager(OzoneConfiguration conf) throws IOException {
+ private StorageContainerManager(OzoneConfiguration conf)
+ throws IOException, AuthenticationException {
final int cacheSize = conf.getInt(OZONE_SCM_DB_CACHE_SIZE_MB,
OZONE_SCM_DB_CACHE_SIZE_DEFAULT);
-
+ this.scmConf = conf;
StorageContainerManager.initMetrics();
initContainerReportCache(conf);
+ // Authenticate SCM if security is enabled
+ if (this.scmConf.getBoolean(OZONE_SECURITY_ENABLED_KEY,
+ OZONE_SECURITY_ENABLED_DEFAULT)) {
+ loginAsSCMUser(this.scmConf);
+ }
scmStorage = new SCMStorage(conf);
if (scmStorage.getState() != StorageState.INITIALIZED) {
@@ -310,6 +326,33 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
}
/**
+ * Login as the configured user for SCM.
+ *
+ * @param conf
+ */
+ private void loginAsSCMUser(Configuration conf)
+ throws IOException, AuthenticationException {
+ LOG.debug("Ozone security is enabled. Attempting login for SCM user. "
+ + "Principal: {}, keytab: {}", this.scmConf.get
+ (OZONE_SCM_KERBEROS_PRINCIPAL_KEY),
+ this.scmConf.get(OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY));
+
+ if (SecurityUtil.getAuthenticationMethod(conf).equals
+ (AuthenticationMethod.KERBEROS)) {
+ UserGroupInformation.setConfiguration(this.scmConf);
+ InetSocketAddress socAddr = HddsServerUtil
+ .getScmBlockClientBindAddress(conf);
+ SecurityUtil.login(conf, OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY,
+ OZONE_SCM_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName());
+ } else {
+ throw new AuthenticationException(SecurityUtil.getAuthenticationMethod
+ (conf) + " authentication method not support. "
+ + "SCM user login failed.");
+ }
+ LOG.info("SCM login successful.");
+ }
+
+ /**
* Builds a message for logging startup information about an RPC server.
*
* @param description RPC server description
@@ -390,6 +433,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
out.println(USAGE + "\n");
}
+<<<<<<< HEAD   [REVIEW NOTE: stray git merge-conflict marker committed into StorageContainerManager.java by this patch — it will break compilation and must be removed in a follow-up commit]
/**
* Create an SCM instance based on the supplied command-line arguments.
*
@@ -420,7 +464,8 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
private static StorageContainerManager createSCM(
String[] args,
OzoneConfiguration conf,
- boolean printBanner) throws IOException {
+ boolean printBanner)
+ throws IOException, AuthenticationException {
String[] argv = (args == null) ? new String[0] : args;
if (!HddsUtils.isHddsEnabled(conf)) {
System.err.println(
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
index 75b2036..da936ad 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdds.scm.server;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.server.BaseHttpServer;
-import org.apache.hadoop.ozone.OzoneConfigKeys;
import java.io.IOException;
@@ -63,11 +62,11 @@ public class StorageContainerManagerHttpServer extends BaseHttpServer {
}
@Override protected String getKeytabFile() {
- return ScmConfigKeys.OZONE_SCM_KEYTAB_FILE;
+ return ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY;
}
@Override protected String getSpnegoPrincipal() {
- return OzoneConfigKeys.OZONE_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL;
+ return ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
}
@Override protected String getEnabledKey() {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
index 008b69d..f3c710b 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.ozone.client.protocol;
import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.client.*;
import org.apache.hadoop.hdds.client.OzoneQuota;
@@ -29,6 +30,7 @@ import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import java.io.IOException;
import java.util.List;
+import org.apache.hadoop.security.KerberosInfo;
/**
* An implementer of this interface is capable of connecting to Ozone Cluster
@@ -38,6 +40,7 @@ import java.util.List;
* includes: {@link org.apache.hadoop.ozone.client.rpc.RpcClient} for RPC and
* {@link org.apache.hadoop.ozone.client.rest.RestClient} for REST.
*/
+@KerberosInfo(serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY)
public interface ClientProtocol {
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-ozone/common/src/main/bin/start-ozone.sh
----------------------------------------------------------------------
diff --git a/hadoop-ozone/common/src/main/bin/start-ozone.sh b/hadoop-ozone/common/src/main/bin/start-ozone.sh
index cfb54e0..536d09e 100755
--- a/hadoop-ozone/common/src/main/bin/start-ozone.sh
+++ b/hadoop-ozone/common/src/main/bin/start-ozone.sh
@@ -75,6 +75,13 @@ if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} ==
exit 1
fi
+#SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-)
+#SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-)
+#if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then
+# echo "Ozone is not supported in a security enabled cluster."
+# exit 1
+#fi
+
#---------------------------------------------------------
# Check if ozone is enabled
OZONE_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey ozone.enabled | tr '[:upper:]' '[:lower:]' 2>&-)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-ozone/common/src/main/bin/stop-ozone.sh
----------------------------------------------------------------------
diff --git a/hadoop-ozone/common/src/main/bin/stop-ozone.sh b/hadoop-ozone/common/src/main/bin/stop-ozone.sh
index 97e1df4..22b2f9f 100755
--- a/hadoop-ozone/common/src/main/bin/stop-ozone.sh
+++ b/hadoop-ozone/common/src/main/bin/stop-ozone.sh
@@ -47,13 +47,12 @@ else
exit 1
fi
-SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-)
-SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-)
-
-if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then
- echo "Ozone is not supported in a security enabled cluster."
- exit 1
-fi
+#SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-)
+#SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-)
+#if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then
+# echo "Ozone is not supported in a security enabled cluster."
+# exit 1
+#fi
#---------------------------------------------------------
# Check if ozone is enabled
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-ozone/integration-test/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/pom.xml b/hadoop-ozone/integration-test/pom.xml
index 993e91f..2331cfd 100644
--- a/hadoop-ozone/integration-test/pom.xml
+++ b/hadoop-ozone/integration-test/pom.xml
@@ -47,6 +47,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-minikdc</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-ozone-objectstore-service</artifactId>
<scope>provided</scope>
</dependency>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
index b34a7d1..6b341d3 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hdds.scm.protocolPB
import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.Logger;
@@ -347,9 +348,16 @@ public final class MiniOzoneClusterImpl implements MiniOzoneCluster {
public MiniOzoneCluster build() throws IOException {
DefaultMetricsSystem.setMiniClusterMode(true);
initializeConfiguration();
- StorageContainerManager scm = createSCM();
- scm.start();
- OzoneManager om = createOM();
+ StorageContainerManager scm;
+ OzoneManager om;
+ try {
+ scm = createSCM();
+ scm.start();
+ om = createOM();
+ } catch (AuthenticationException ex) {
+ throw new IOException("Unable to build MiniOzoneCluster. ", ex);
+ }
+
om.start();
final List<HddsDatanodeService> hddsDatanodes = createHddsDatanodes(scm);
MiniOzoneClusterImpl cluster = new MiniOzoneClusterImpl(conf, om, scm,
@@ -380,7 +388,8 @@ public final class MiniOzoneClusterImpl implements MiniOzoneCluster {
*
* @throws IOException
*/
- private StorageContainerManager createSCM() throws IOException {
+ private StorageContainerManager createSCM()
+ throws IOException, AuthenticationException {
configureSCM();
SCMStorage scmStore = new SCMStorage(conf);
initializeScmStorage(scmStore);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
new file mode 100644
index 0000000..9c430ad
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
@@ -0,0 +1,205 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Properties;
+import java.util.UUID;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.server.SCMStorage;
+import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.KerberosAuthException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test class for a security enabled Ozone cluster.
+ */
+@InterfaceAudience.Private
+public final class TestSecureOzoneCluster {
+
+ private Logger LOGGER = LoggerFactory
+ .getLogger(TestSecureOzoneCluster.class);
+
+ private MiniKdc miniKdc;
+ private OzoneConfiguration conf;
+ private File workDir;
+ private static Properties securityProperties;
+ private File scmKeytab;
+ private File spnegoKeytab;
+ private String curUser;
+
+ @Before
+ public void init() {
+ try {
+ conf = new OzoneConfiguration();
+ startMiniKdc();
+ setSecureConfig(conf);
+ createCredentialsInKDC(conf, miniKdc);
+ } catch (IOException e) {
+ LOGGER.error("Failed to initialize TestSecureOzoneCluster", e);
+ } catch (Exception e) {
+ LOGGER.error("Failed to initialize TestSecureOzoneCluster", e);
+ }
+ }
+
+ private void createCredentialsInKDC(Configuration conf, MiniKdc miniKdc)
+ throws Exception {
+ createPrincipal(scmKeytab,
+ conf.get(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY));
+ createPrincipal(spnegoKeytab,
+ conf.get(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY));
+ }
+
+ private void createPrincipal(File keytab, String... principal)
+ throws Exception {
+ miniKdc.createPrincipal(keytab, principal);
+ }
+
+ private void startMiniKdc() throws Exception {
+ workDir = GenericTestUtils
+ .getTestDir(TestSecureOzoneCluster.class.getSimpleName());
+ securityProperties = MiniKdc.createConf();
+ miniKdc = new MiniKdc(securityProperties, workDir);
+ miniKdc.start();
+ }
+
+ private void setSecureConfig(Configuration conf) throws IOException {
+ conf.setBoolean(OZONE_SECURITY_ENABLED_KEY, true);
+ String host = KerberosUtil.getLocalHostName();
+ String realm = miniKdc.getRealm();
+ curUser = UserGroupInformation.getCurrentUser()
+ .getUserName();
+ conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ conf.set(OZONE_ADMINISTRATORS, curUser);
+
+ conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY,
+ "scm/" + host + "@" + realm);
+ conf.set(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+ "HTTP_SCM/" + host + "@" + realm);
+
+ scmKeytab = new File(workDir, "scm.keytab");
+ spnegoKeytab = new File(workDir, "http.keytab");
+
+ conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY,
+ scmKeytab.getAbsolutePath());
+ conf.set(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY,
+ spnegoKeytab.getAbsolutePath());
+
+ }
+
+ @Test
+ public void testSecureScmStartupSuccess() throws Exception {
+ final String path = GenericTestUtils
+ .getTempPath(UUID.randomUUID().toString());
+ Path scmPath = Paths.get(path, "scm-meta");
+ conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString());
+ conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true);
+ SCMStorage scmStore = new SCMStorage(conf);
+ String clusterId = UUID.randomUUID().toString();
+ String scmId = UUID.randomUUID().toString();
+ scmStore.setClusterId(clusterId);
+ scmStore.setScmId(scmId);
+ // writes the version file properties
+ scmStore.initialize();
+ StorageContainerManager scm = StorageContainerManager.createSCM(null, conf);
+ //Reads the SCM Info from SCM instance
+ ScmInfo scmInfo = scm.getClientProtocolServer().getScmInfo();
+ Assert.assertEquals(clusterId, scmInfo.getClusterId());
+ Assert.assertEquals(scmId, scmInfo.getScmId());
+ }
+
+ @Test
+ public void testSecureScmStartupFailure() throws Exception {
+ final String path = GenericTestUtils
+ .getTempPath(UUID.randomUUID().toString());
+ Path scmPath = Paths.get(path, "scm-meta");
+
+ OzoneConfiguration conf = new OzoneConfiguration();
+ conf.setBoolean(OZONE_SECURITY_ENABLED_KEY, true);
+ conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString());
+ conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true);
+ conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY,
+ "scm@" + miniKdc.getRealm());
+ conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+
+ SCMStorage scmStore = new SCMStorage(conf);
+ String clusterId = UUID.randomUUID().toString();
+ String scmId = UUID.randomUUID().toString();
+ scmStore.setClusterId(clusterId);
+ scmStore.setScmId(scmId);
+ // writes the version file properties
+ scmStore.initialize();
+ LambdaTestUtils.intercept(IOException.class,
+ "Running in secure mode, but config doesn't have a keytab",
+ () -> {
+ StorageContainerManager.createSCM(null, conf);
+ });
+
+ conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY,
+ "scm/_HOST@EXAMPLE.com");
+ conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY,
+ "/etc/security/keytabs/scm.keytab");
+
+ LambdaTestUtils.intercept(KerberosAuthException.class, "failure "
+ + "to login: for principal:",
+ () -> {
+ StorageContainerManager.createSCM(null, conf);
+ });
+ conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "OAuth2");
+
+ LambdaTestUtils.intercept(IllegalArgumentException.class, "Invalid"
+ + " attribute value for hadoop.security.authentication of OAuth2",
+ () -> {
+ StorageContainerManager.createSCM(null, conf);
+ });
+
+ conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "KERBEROS_SSL");
+ LambdaTestUtils.intercept(AuthenticationException.class,
+ "KERBEROS_SSL authentication method not support.",
+ () -> {
+ StorageContainerManager.createSCM(null, conf);
+ });
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9dc72d12/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
index 94ab6c8..301a0c3 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
@@ -68,8 +68,10 @@ import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo;
import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
+
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
@@ -434,7 +436,8 @@ public class TestStorageContainerManager {
}
@Test
- public void testSCMInitializationFailure() throws IOException {
+ public void testSCMInitializationFailure()
+ throws IOException, AuthenticationException {
OzoneConfiguration conf = new OzoneConfiguration();
final String path =
GenericTestUtils.getTempPath(UUID.randomUUID().toString());
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org