Posted to common-commits@hadoop.apache.org by in...@apache.org on 2019/03/03 18:37:22 UTC

[hadoop] 43/45: HDFS-14052. RBF: Use Router keytab for WebHDFS. Contributed by CR Hota.

This is an automated email from the ASF dual-hosted git repository.

inigoiri pushed a commit to branch HDFS-13891
in repository https://gitbox.apache.org/repos/asf/hadoop.git

commit 5d548189c67047121eead97948d21aaea75a303a
Author: Brahma Reddy Battula <br...@apache.org>
AuthorDate: Tue Feb 26 07:42:23 2019 +0530

    HDFS-14052. RBF: Use Router keytab for WebHDFS. Contributed by CR Hota.
---
 .../server/federation/router/RouterHttpServer.java |  4 +-
 .../contract/router/web/RouterWebHDFSContract.java | 12 ++--
 .../router/TestRouterWithSecureStartup.java        | 69 ++++++++++++++++++++++
 3 files changed, 80 insertions(+), 5 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterHttpServer.java
index d6a5146..300bc07 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterHttpServer.java
@@ -88,7 +88,9 @@ public class RouterHttpServer extends AbstractService {
 
     this.httpServer = builder.build();
 
-    NameNodeHttpServer.initWebHdfs(conf, httpAddress.getHostName(), null,
+    String httpKeytab = conf.get(DFSUtil.getSpnegoKeytabKey(conf,
+        RBFConfigKeys.DFS_ROUTER_KEYTAB_FILE_KEY));
+    NameNodeHttpServer.initWebHdfs(conf, httpAddress.getHostName(), httpKeytab,
         httpServer, RouterWebHdfsMethods.class.getPackage().getName());
 
     this.httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, this.router);
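
The hunk above replaces the hard-coded null keytab with one resolved from the
Router's own configuration, so the WebHDFS endpoint can log in with the Router
keytab. A minimal sketch of that lookup, with a placeholder keytab path (the
path is illustrative, not part of the commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys;

    public class RouterKeytabLookupSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Placeholder path; in a real deployment this comes from hdfs-site.xml.
        conf.set(RBFConfigKeys.DFS_ROUTER_KEYTAB_FILE_KEY,
            "/etc/security/keytabs/router.keytab");

        // getSpnegoKeytabKey prefers the dedicated SPNEGO keytab key when it is
        // set and otherwise falls back to the key passed in (here the Router
        // keytab key), so WebHDFS reuses the Router keytab instead of null.
        String keytabKey = DFSUtil.getSpnegoKeytabKey(conf,
            RBFConfigKeys.DFS_ROUTER_KEYTAB_FILE_KEY);
        String httpKeytab = conf.get(keytabKey);
        System.out.println(httpKeytab); // /etc/security/keytabs/router.keytab
      }
    }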
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/web/RouterWebHDFSContract.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/web/RouterWebHDFSContract.java
index 02e9f39..4e205df 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/web/RouterWebHDFSContract.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/web/RouterWebHDFSContract.java
@@ -55,16 +55,20 @@ public class RouterWebHDFSContract extends HDFSContract {
   }
 
   public static void createCluster() throws IOException {
+    createCluster(new HdfsConfiguration());
+  }
+
+  public static void createCluster(Configuration conf) throws IOException {
     try {
-      HdfsConfiguration conf = new HdfsConfiguration();
       conf.addResource(CONTRACT_HDFS_XML);
       conf.addResource(CONTRACT_WEBHDFS_XML);
 
-      cluster = new MiniRouterDFSCluster(true, 2);
+      cluster = new MiniRouterDFSCluster(true, 2, conf);
 
       // Start NNs and DNs and wait until ready
-      cluster.startCluster();
+      cluster.startCluster(conf);
 
+      cluster.addRouterOverrides(conf);
       // Start routers with only an RPC service
       cluster.startRouters();
 
@@ -85,7 +89,7 @@ public class RouterWebHDFSContract extends HDFSContract {
       cluster.waitActiveNamespaces();
     } catch (Exception e) {
       cluster = null;
-      throw new IOException("Cannot start federated cluster", e);
+      throw new IOException(e.getCause());
     }
   }
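
The overload added above lets callers hand RouterWebHDFSContract a pre-built
Configuration (for example a kerberized one) instead of the default
HdfsConfiguration, and the same conf is also pushed to the routers via
addRouterOverrides(). A hedged usage sketch, assuming the secure test
configuration comes from SecurityConfUtil.initSecurity() as in the new test
below:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.contract.router.web.RouterWebHDFSContract;
    import static org.apache.hadoop.fs.contract.router.SecurityConfUtil.initSecurity;

    public class SecureContractClusterSketch {
      public static void main(String[] args) throws Exception {
        // Build a kerberized test configuration and boot the federated
        // mini cluster (NNs, DNs and routers) with it.
        Configuration conf = initSecurity();
        RouterWebHDFSContract.createCluster(conf);
        // getCluster() returns the MiniRouterDFSCluster started above.
        System.out.println("Cluster started: "
            + (RouterWebHDFSContract.getCluster() != null));
      }
    }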
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWithSecureStartup.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWithSecureStartup.java
new file mode 100644
index 0000000..7cc2c87
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWithSecureStartup.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.federation.router;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.router.web.RouterWebHDFSContract;
+import org.junit.Rule;
+import org.junit.Test;
+import static org.junit.Assert.assertNotNull;
+import org.junit.rules.ExpectedException;
+import java.io.IOException;
+
+import static org.apache.hadoop.fs.contract.router.SecurityConfUtil.initSecurity;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_KEYTAB_FILE_KEY;
+
+
+/**
+ * Test secure router start up scenarios.
+ */
+public class TestRouterWithSecureStartup {
+
+  @Rule
+  public ExpectedException exceptionRule = ExpectedException.none();
+
+  @Test
+  public void testStartupWithoutSpnegoPrincipal() throws Exception {
+    testCluster(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+        "Unable to initialize WebAppContext");
+  }
+
+  @Test
+  public void testStartupWithoutKeytab() throws Exception {
+    testCluster(DFS_ROUTER_KEYTAB_FILE_KEY,
+        "Running in secure mode, but config doesn't have a keytab");
+  }
+
+  @Test
+  public void testSuccessfulStartup() throws Exception {
+    Configuration conf = initSecurity();
+    RouterWebHDFSContract.createCluster(conf);
+    assertNotNull(RouterWebHDFSContract.getCluster());
+  }
+
+  private void testCluster(String configToTest, String message)
+      throws Exception {
+    Configuration conf = initSecurity();
+    conf.unset(configToTest);
+    exceptionRule.expect(IOException.class);
+    exceptionRule.expectMessage(message);
+    RouterWebHDFSContract.createCluster(conf);
+  }
+}
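
For reference, a hedged sketch of the two settings these tests toggle; the
keytab path and principal values below are placeholders, not taken from the
commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys;

    public class RouterSecureConfigSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Keytab the Router (and now its WebHDFS endpoint) logs in with.
        conf.set(RBFConfigKeys.DFS_ROUTER_KEYTAB_FILE_KEY,
            "/etc/security/keytabs/router.keytab");
        // SPNEGO principal for the HTTP endpoint.
        conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
            "HTTP/_HOST@EXAMPLE.COM");
        // Unsetting either key is what testStartupWithoutKeytab and
        // testStartupWithoutSpnegoPrincipal do to provoke the failures above.
        conf.unset(RBFConfigKeys.DFS_ROUTER_KEYTAB_FILE_KEY);
        System.out.println(conf.get(RBFConfigKeys.DFS_ROUTER_KEYTAB_FILE_KEY)); // null
      }
    }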


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org