Posted to commits@ranger.apache.org by di...@apache.org on 2015/03/11 19:36:25 UTC

incubator-ranger git commit: RANGER-293: add server side checks for HDFS Repo connection properties

Repository: incubator-ranger
Updated Branches:
  refs/heads/master ac3754131 -> cedd97aa9


RANGER-293: add server side checks for HDFS Repo connection properties
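
To make the intent concrete, here is a minimal sketch of the behavior this patch adds (the driver class is hypothetical; the config keys and the expected message come from the patch below):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.ranger.services.hdfs.client.HdfsClient;

    public class ValidationSketch {
      public static void main(String[] args) {
        // Deliberately incomplete config: username present, password missing.
        Map<String, String> configs = new HashMap<String, String>();
        configs.put("username", "hdfsuser");
        try {
          HdfsClient.validateConnectionConfigs(configs);
        } catch (IllegalArgumentException e) {
          System.out.println(e.getMessage()); // prints: Value for password not specified
        }
      }
    }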


Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/cedd97aa
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/cedd97aa
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/cedd97aa

Branch: refs/heads/master
Commit: cedd97aa9b332f7014da52d7c8d1b585e5711cc7
Parents: ac37541
Author: Dilli Dorai Arumugam <da...@hortonworks.com>
Authored: Fri Mar 6 14:10:19 2015 -0800
Committer: Dilli Dorai Arumugam <da...@hortonworks.com>
Committed: Wed Mar 11 11:25:08 2015 -0700

----------------------------------------------------------------------
 .../ranger/services/hdfs/client/HdfsClient.java |  89 +++++++++--
 .../services/hdfs/client/HdfsClientTest.java    | 155 +++++++++++++++++++
 2 files changed, 233 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/cedd97aa/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
index 03f8124..1d1eef9 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
@@ -192,15 +192,26 @@ public class HdfsClient extends BaseClient {
 	public static HashMap<String, Object> testConnection(String serviceName,
 			Map<String, String> configs) {
 
-		HashMap<String, Object> responseData = new HashMap<String, Object>();
-		boolean connectivityStatus = false;
-		HdfsClient connectionObj = new HdfsClient(serviceName, configs);
-		if (connectionObj != null) {
-			List<String> testResult = connectionObj.listFiles("/", null,null);
-			if (testResult != null && testResult.size() != 0) {
-				connectivityStatus = true;
-			}
-		}
+    HashMap<String, Object> responseData = new HashMap<String, Object>();
+    boolean connectivityStatus = false;
+
+    // validate connection properties before attempting to connect
+    String validateConfigsMsg = null;
+    try {
+      validateConnectionConfigs(configs);
+    } catch (IllegalArgumentException e) {
+      validateConfigsMsg = e.getMessage();
+    }
+
+    if (validateConfigsMsg == null) {
+      HdfsClient connectionObj = new HdfsClient(serviceName, configs);
+      if (connectionObj != null) {
+        List<String> testResult = connectionObj.listFiles("/", null, null);
+        if (testResult != null && testResult.size() != 0) {
+          connectivityStatus = true;
+        }
+      }
+    }
+
 		if (connectivityStatus) {
 			String successMsg = "TestConnection Successful";
 			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
@@ -209,11 +220,67 @@ public class HdfsClient extends BaseClient {
 			String failureMsg = "Unable to retrieve any files using given parameters, "
 					+ "You can still save the repository and start creating policies, "
 					+ "but you would not be able to use autocomplete for resource names. "
-					+ "Check xa_portal.log for more info.";
-			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg,
+					+ "Check xa_portal.log for more info. ";
+      String additionalMsg = (validateConfigsMsg != null) ?
+          validateConfigsMsg : failureMsg;
+			generateResponseDataMap(connectivityStatus, failureMsg, additionalMsg,
 					null, null, responseData);
 		}
 		return responseData;
 	}
 
+  /**
+   * Validates HDFS repo connection properties, throwing an
+   * IllegalArgumentException that names the first missing property.
+   */
+  public static void validateConnectionConfigs(Map<String, String> configs)
+      throws IllegalArgumentException {
+
+    // username
+    String username = configs.get("username");
+    if (username == null || username.isEmpty()) {
+      throw new IllegalArgumentException("Value for username not specified");
+    }
+
+    // password
+    String password = configs.get("password");
+    if (password == null || password.isEmpty()) {
+      throw new IllegalArgumentException("Value for password not specified");
+    }
+
+    // hadoop.security.authentication
+    String authentication = configs.get("hadoop.security.authentication");
+    if (authentication == null || authentication.isEmpty()) {
+      throw new IllegalArgumentException("Value for hadoop.security.authentication not specified");
+    }
+
+    // exactly one of fs.default.name (non-HA) and dfs.nameservices (HA) must be set
+    String fsDefaultName = configs.get("fs.default.name");
+    fsDefaultName = (fsDefaultName == null) ? "" : fsDefaultName.trim();
+    String dfsNameservices = configs.get("dfs.nameservices");
+    dfsNameservices = (dfsNameservices == null) ? "" : dfsNameservices.trim();
+
+    if (fsDefaultName.isEmpty() && dfsNameservices.isEmpty()) {
+      throw new IllegalArgumentException("Value for neither fs.default.name nor dfs.nameservices is specified");
+    }
+
+    if (!fsDefaultName.isEmpty() && !dfsNameservices.isEmpty()) {
+      throw new IllegalArgumentException("Values for both fs.default.name and dfs.nameservices are specified; they are mutually exclusive");
+    }
+
+    // for HA, every namenode listed for the nameservice must have an rpc-address
+    if (!dfsNameservices.isEmpty()) {
+      String dfsNameNodes = configs.get("dfs.ha.namenodes." + dfsNameservices);
+      dfsNameNodes = (dfsNameNodes == null) ? "" : dfsNameNodes.trim();
+      if (dfsNameNodes.isEmpty()) {
+        throw new IllegalArgumentException("Value for dfs.ha.namenodes." + dfsNameservices + " not specified");
+      }
+      String[] dfsNameNodeElements = dfsNameNodes.split(",");
+      for (String dfsNameNodeElement : dfsNameNodeElements) {
+        String nameNodeUrlKey = "dfs.namenode.rpc-address." +
+            dfsNameservices + "." + dfsNameNodeElement.trim();
+        String nameNodeUrl = configs.get(nameNodeUrlKey);
+        nameNodeUrl = (nameNodeUrl == null) ? "" : nameNodeUrl.trim();
+        if (nameNodeUrl.isEmpty()) {
+          throw new IllegalArgumentException("Value for " + nameNodeUrlKey + " not specified");
+        }
+      }
+    }
+  }
+
 }
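
For reference, a config that satisfies every check above, in the HA case (a sketch only: the nameservice id "ns1" and the hostnames are illustrative, the keys mirror the ones validated in validateConnectionConfigs, and java.util.Map/HashMap imports are assumed):

    Map<String, String> configs = new HashMap<String, String>();
    configs.put("username", "hdfsuser");
    configs.put("password", "hdfsuser");
    configs.put("hadoop.security.authentication", "simple");
    configs.put("dfs.nameservices", "ns1");  // HA: fs.default.name must be left unset
    configs.put("dfs.ha.namenodes.ns1", "nn1,nn2");
    configs.put("dfs.namenode.rpc-address.ns1.nn1", "node-1.example.com:8020");
    configs.put("dfs.namenode.rpc-address.ns1.nn2", "node-2.example.com:8020");
    HdfsClient.validateConnectionConfigs(configs); // completes without exception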

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/cedd97aa/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java
new file mode 100644
index 0000000..b5ccbf0
--- /dev/null
+++ b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.services.hdfs.client;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.Test;
+
+
+public class HdfsClientTest {
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testUsernameNotSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testPasswordNotSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testAuthenticationNotSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNeitherNnNorHaSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testBothNnAndHaSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    configs.put("fs.default.name", "hdfs://node-2.example.com:8020");
+    configs.put("dfs.nameservices", "hwqe-1425428405");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNnElementsNotSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    configs.put("dfs.nameservices", "hwqe-1425428405");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNn1UrlNn2UrlNotSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNn1UrlNotSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
+    configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn2", "node-2.example.com:8020");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNn2UrlNotSpecified() throws IllegalArgumentException {
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
+    configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn1", "node-1.example.com:8020");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test
+  public void testValidNonHaConfig() throws IllegalArgumentException {
+
+    // username: hdfsuser
+    // password: hdfsuser
+    // hadoop.security.authentication: simple
+    // fs.default.name: hdfs://node-2.example.com:8020
+
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    configs.put("fs.default.name", "hdfs://node-2.example.com:8020");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+  @Test
+  public void testValidHaConfig() throws IllegalArgumentException {
+
+    // username: hdfsuser
+    // password: hdfsuser
+    // hadoop.security.authentication: simple
+    // dfs.nameservices: hwqe-1425428405
+    // fs.default.name:
+    // dfs.ha.namenodes.hwqe-1425428405: nn1,nn2
+    // dfs.namenode.rpc-address.hwqe-1425428405.nn2: node-2.example.com:8020
+    // dfs.namenode.rpc-address.hwqe-1425428405.nn1:  node-1.example.com:8020
+
+    Map<String, String> configs = new HashMap<String, String>();
+    configs.put("username", "hdfsuser");
+    configs.put("password", "hdfsuser");
+    configs.put("hadoop.security.authentication", "simple");
+    // configs.put("fs.default.name", "hdfs://node-2.example.com:8020");
+    configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
+    configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn1", "node-1.example.com:8020");
+    configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn2", "node-2.example.com:8020");
+    HdfsClient.validateConnectionConfigs(configs);
+  }
+
+}
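
End to end, the validation surfaces through testConnection: when a config fails the new checks, connectivityStatus stays false and the response carries the validation message instead of the generic failure text. A hedged sketch of a test one might add (not part of this patch; the service name "hdfsdev" is illustrative, and the response keys are whatever generateResponseDataMap populates):

    @Test
    public void testValidationMessageSurfacesThroughTestConnection() {
      Map<String, String> configs = new HashMap<String, String>();
      configs.put("username", "hdfsuser"); // password deliberately omitted
      HashMap<String, Object> response = HdfsClient.testConnection("hdfsdev", configs);
      // Expect a failure response whose additional-info message is
      // "Value for password not specified".
      System.out.println(response);
    }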