Posted to commits@ranger.apache.org by an...@apache.org on 2017/03/31 17:17:26 UTC
ranger git commit: RANGER-1483 : Ranger hive service definition to use hive metastore directly
Repository: ranger
Updated Branches:
refs/heads/ranger-0.7 d60ae8fe4 -> 6c0b06252
RANGER-1483 : Ranger hive service definition to use hive metastore directly
Project: http://git-wip-us.apache.org/repos/asf/ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/ranger/commit/6c0b0625
Tree: http://git-wip-us.apache.org/repos/asf/ranger/tree/6c0b0625
Diff: http://git-wip-us.apache.org/repos/asf/ranger/diff/6c0b0625
Branch: refs/heads/ranger-0.7
Commit: 6c0b06252ad670bdbd5ffba93a3b51015308e9ec
Parents: d60ae8f
Author: Ankita Sinha <an...@apache.org>
Authored: Fri Mar 31 10:48:48 2017 +0530
Committer: ankita <an...@apache.org>
Committed: Fri Mar 31 22:46:46 2017 +0530
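This change adds two optional service-config properties, enable.hive.metastore.lookup and
hive.site.file.path, read by HadoopConfigHolder. When the flag is true, HiveClient resolves
database, table, and column names through a HiveMetaStoreClient built from the supplied
hive-site.xml instead of issuing SQL (e.g. "show tables") over the JDBC connection to
HiveServer2; when the flag is false or unset, the existing JDBC lookup path is used unchanged.
A service configuration enabling the new path might look like the following sketch (the file
path is illustrative):

    enable.hive.metastore.lookup = true
    hive.site.file.path = /etc/hive/conf/hive-site.xml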
----------------------------------------------------------------------
.../plugin/client/HadoopConfigHolder.java | 90 ++--
.../ranger/services/hive/client/HiveClient.java | 466 +++++++++++++------
security-admin/pom.xml | 5 +
.../scripts/views/service/ConfigurationList.js | 6 +-
4 files changed, 394 insertions(+), 173 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ranger/blob/6c0b0625/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
index 00374de..654a7e9 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
@@ -27,6 +27,7 @@ import java.util.Map;
import java.util.Properties;
import java.util.Set;
+import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.SecureClientLogin;
@@ -52,34 +53,36 @@ public class HadoopConfigHolder {
public static final String HADOOP_NAME_RULES = "hadoop.security.auth_to_local";
public static final String HADOOP_SECURITY_AUTHENTICATION_METHOD = "kerberos";
public static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection";
-
+ public static final String ENABLE_HIVE_METASTORE_LOOKUP = "enable.hive.metastore.lookup";
+ public static final String HIVE_SITE_FILE_PATH = "hive.site.file.path";
private static boolean initialized = false;
private static Map<String,HashMap<String,Properties>> dataSource2ResourceListMap = new HashMap<String,HashMap<String,Properties>>();
- private static Properties globalLoginProp = new Properties();
private static Map<String,HadoopConfigHolder> dataSource2HadoopConfigHolder = new HashMap<String,HadoopConfigHolder>();
+ private static Properties globalLoginProp = new Properties();
private static Properties resourcemapProperties = null;
-
private String datasourceName;
private String defaultConfigFile;
private String userName;
private String keyTabFile;
private String password;
- private boolean isKerberosAuth;
private String lookupPrincipal;
private String lookupKeytab;
private String nameRules;
private String authType;
+ private String hiveSiteFilePath;
+ private boolean isKerberosAuth;
+ private boolean enableHiveMetastoreLookup;
- private Map<String,String> connectionProperties;
+ private Map<String,String> connectionProperties;
private static Set<String> rangerInternalPropertyKeys = new HashSet<String>();
public static HadoopConfigHolder getInstance(String aDatasourceName) {
HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName);
if (ret == null) {
- synchronized(HadoopConfigHolder.class) {
+ synchronized (HadoopConfigHolder.class) {
HadoopConfigHolder temp = ret;
if (temp == null) {
ret = new HadoopConfigHolder(aDatasourceName);
@@ -98,7 +101,7 @@ public class HadoopConfigHolder {
String defaultConfigFile) {
HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName);
if (ret == null) {
- synchronized(HadoopConfigHolder.class) {
+ synchronized (HadoopConfigHolder.class) {
HadoopConfigHolder temp = ret;
if (temp == null) {
ret = new HadoopConfigHolder(aDatasourceName,connectionProperties, defaultConfigFile);
@@ -121,27 +124,34 @@ public class HadoopConfigHolder {
private HadoopConfigHolder(String aDatasourceName) {
datasourceName = aDatasourceName;
- if ( ! initialized ) {
+ if (!initialized) {
init();
}
initLoginInfo();
}
- private HadoopConfigHolder(String aDatasourceName, Map<String,String> connectionProperties) {
- this(aDatasourceName, connectionProperties, null);
- }
+ private HadoopConfigHolder(String aDatasourceName,
+ Map<String, String> connectionProperties) {
+ this(aDatasourceName, connectionProperties, null);
+ }
private HadoopConfigHolder(String aDatasourceName, Map<String,String> connectionProperties,
String defaultConfigFile) {
datasourceName = aDatasourceName;
this.connectionProperties = connectionProperties;
- this.defaultConfigFile = defaultConfigFile;
+ this.defaultConfigFile = defaultConfigFile;
initConnectionProp();
initLoginInfo();
}
private void initConnectionProp() {
- for(String key : connectionProperties.keySet()) {
+ if (!connectionProperties.containsKey(ENABLE_HIVE_METASTORE_LOOKUP)) {
+ connectionProperties.put(ENABLE_HIVE_METASTORE_LOOKUP, "false");
+ }
+ if (!connectionProperties.containsKey(HIVE_SITE_FILE_PATH)) {
+ connectionProperties.put(HIVE_SITE_FILE_PATH, null);
+ }
+ for (String key : connectionProperties.keySet()) {
String resourceName = getResourceName(key);
@@ -160,10 +170,9 @@ public class HadoopConfigHolder {
}
if (resourcemapProperties != null) {
- String rn = resourcemapProperties.getProperty(key);
- return ( rn != null) ? rn : defaultConfigFile;
- }
- else {
+ String rn = resourcemapProperties.getProperty(key);
+ return (rn != null) ? rn : defaultConfigFile;
+ } else {
return defaultConfigFile;
}
}
@@ -178,7 +187,7 @@ public class HadoopConfigHolder {
for (Map.Entry<Object, Object> entry : resourcemapProperties.entrySet() ) {
String key = (String)entry.getKey();
String value = (String)entry.getValue();
- if (RANGER_SECTION_NAME.equals(value)) {
+ if (RANGER_SECTION_NAME.equals(value)) {
rangerInternalPropertyKeys.add(key);
}
}
@@ -190,7 +199,7 @@ public class HadoopConfigHolder {
try {
in.close();
}
- catch(IOException ioe) {
+ catch (IOException ioe) {
// Ignore IOException during close of stream
}
}
@@ -230,7 +239,7 @@ public class HadoopConfigHolder {
if (prop.size() == 0)
return;
- for(Object keyobj : prop.keySet()) {
+ for (Object keyobj : prop.keySet()) {
String key = (String)keyobj;
String val = prop.getProperty(key);
@@ -282,10 +291,23 @@ public class HadoopConfigHolder {
if (prop != null) {
userName = prop.getProperty(RANGER_LOGIN_USER_NAME_PROP);
keyTabFile = prop.getProperty(RANGER_LOGIN_KEYTAB_FILE_PROP);
+ if (!StringUtils.isEmpty(prop.getProperty(ENABLE_HIVE_METASTORE_LOOKUP).trim())) {
+ try {
+ enableHiveMetastoreLookup = Boolean.valueOf(prop.getProperty(ENABLE_HIVE_METASTORE_LOOKUP,"false").trim());
+ } catch (Exception e) {
+ enableHiveMetastoreLookup = false;
+ LOG.error("Error while getting " + ENABLE_HIVE_METASTORE_LOOKUP + " : " + e.getMessage());
+ }
+ }
+ if (!StringUtils.isEmpty(prop.getProperty(HIVE_SITE_FILE_PATH))) {
+ hiveSiteFilePath = prop.getProperty(HIVE_SITE_FILE_PATH).trim();
+ } else {
+ hiveSiteFilePath = null;
+ }
String plainTextPwd = prop.getProperty(RANGER_LOGIN_PASSWORD);
try {
password = PasswordUtils.encryptPassword(plainTextPwd);
- }catch (IOException e) {
+ } catch (IOException e) {
throw new HadoopException("Unable to initialize login info", e);
}
@@ -296,8 +318,8 @@ public class HadoopConfigHolder {
String hadoopSecurityAuthenticationn = getHadoopSecurityAuthentication();
- if ( hadoopSecurityAuthenticationn != null) {
- isKerberosAuth = ( hadoopSecurityAuthenticationn.equalsIgnoreCase(HADOOP_SECURITY_AUTHENTICATION_METHOD));
+ if (hadoopSecurityAuthenticationn != null) {
+ isKerberosAuth = (hadoopSecurityAuthenticationn.equalsIgnoreCase(HADOOP_SECURITY_AUTHENTICATION_METHOD));
}
else {
isKerberosAuth = (((userName != null) && (userName.indexOf("@") > -1)) || (SecureClientLogin.isKerberosCredentialExists(lookupPrincipal, lookupKeytab)));
@@ -374,17 +396,17 @@ public class HadoopConfigHolder {
String ret = null;
String sectionName = RANGER_SECTION_NAME;
- if ( defaultConfigFile != null) {
+ if (defaultConfigFile != null) {
sectionName = defaultConfigFile;
}
- if ( LOG.isDebugEnabled() ) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("==> HadoopConfigHolder.getHadoopSecurityAuthentication( " + " DataSource : " + sectionName + " Property : " + HADOOP_SECURITY_AUTHENTICATION + ")" );
}
ret = getProperties(sectionName,HADOOP_SECURITY_AUTHENTICATION);
- if ( LOG.isDebugEnabled() ) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HadoopConfigHolder.getHadoopSecurityAuthentication(" + " DataSource : " + sectionName + " Property : " + HADOOP_SECURITY_AUTHENTICATION + " Value : " + ret + ")" );
}
@@ -423,6 +445,14 @@ public class HadoopConfigHolder {
return authType;
}
+ public boolean isEnableHiveMetastoreLookup() {
+ return enableHiveMetastoreLookup;
+ }
+
+ public String getHiveSiteFilePath() {
+ return hiveSiteFilePath;
+ }
+
public Set<String> getRangerInternalPropertyKeys() {
return rangerInternalPropertyKeys;
@@ -430,7 +460,7 @@ public class HadoopConfigHolder {
private String getProperties(String sectionName, String property) {
- if ( LOG.isDebugEnabled() ) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("==> HadoopConfigHolder.getProperties( " + " DataSource : " + sectionName + " Property : " + property + ")" );
}
@@ -439,15 +469,15 @@ public class HadoopConfigHolder {
HashMap<String,Properties> resourceName2PropertiesMap = dataSource2ResourceListMap.get(this.getDatasourceName());
- if ( resourceName2PropertiesMap != null) {
+ if (resourceName2PropertiesMap != null) {
repoParam=resourceName2PropertiesMap.get(sectionName);
}
- if ( repoParam != null ) {
+ if (repoParam != null) {
ret = (String)repoParam.get(property);
}
- if ( LOG.isDebugEnabled() ) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HadoopConfigHolder.getProperties( " + " DataSource : " + sectionName + " Property : " + property + " Value : " + ret);
}
http://git-wip-us.apache.org/repos/asf/ranger/blob/6c0b0625/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
index 734c8e7..4bc0da6 100644
--- a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
+++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
@@ -20,6 +20,8 @@
package org.apache.ranger.services.hive.client;
import java.io.Closeable;
+import java.io.File;
+import java.net.MalformedURLException;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
@@ -39,17 +41,30 @@ import javax.security.auth.Subject;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.ranger.plugin.client.BaseClient;
import org.apache.ranger.plugin.client.HadoopException;
+import org.apache.thrift.TException;
public class HiveClient extends BaseClient implements Closeable {
private static final Log LOG = LogFactory.getLog(HiveClient.class);
- Connection con = null;
- boolean isKerberosAuth=false;
+ private static final String ERR_MSG = " You can still save the repository and start creating "
+ + "policies, but you would not be able to use autocomplete for "
+ + "resource names. Check ranger_admin.log for more info.";
+
+ private Connection con;
+ private HiveMetaStoreClient hiveClient;
+ private String hiveSiteFilePath;
+ private boolean isKerberosAuth;
+ private boolean enableHiveMetastoreLookup;
public HiveClient(String serviceName) throws Exception {
super(serviceName, null);
@@ -62,6 +77,8 @@ public class HiveClient extends BaseClient implements Closeable {
}
public void initHive() throws Exception {
+ enableHiveMetastoreLookup = getConfigHolder().isEnableHiveMetastoreLookup();
+ hiveSiteFilePath = getConfigHolder().getHiveSiteFilePath();
isKerberosAuth = getConfigHolder().isKerberosAuthentication();
if (isKerberosAuth) {
LOG.info("Secured Mode: JDBC Connection done with preAuthenticated Subject");
@@ -90,7 +107,11 @@ public class HiveClient extends BaseClient implements Closeable {
public List<String> run() {
List<String> ret = null;
try {
- ret = getDBList(dbMatching,dbList);
+ if (enableHiveMetastoreLookup) {
+ ret = getDBListFromHM(dbMatching,dbList);
+ } else {
+ ret = getDBList(dbMatching,dbList);
+ }
} catch ( HadoopException he) {
LOG.error("<== HiveClient getDatabaseList() :Unable to get the Database List", he);
throw he;
@@ -100,6 +121,45 @@ public class HiveClient extends BaseClient implements Closeable {
});
return dblist;
}
+
+ private List<String> getDBListFromHM(String databaseMatching, List<String> dbList) throws HadoopException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> HiveClient getDBListFromHM databaseMatching : " + databaseMatching + " ExcludedbList : " + dbList);
+ }
+ List<String> ret = new ArrayList<String>();
+ try {
+ List<String> hiveDBList = null;
+ if (hiveClient != null) {
+ if (databaseMatching.equalsIgnoreCase("*")) {
+ hiveDBList = hiveClient.getAllDatabases();
+ } else {
+ hiveDBList = hiveClient.getDatabases(databaseMatching);
+ }
+ }
+ if (hiveDBList != null) {
+ for (String dbName : hiveDBList) {
+ if (dbList != null && dbList.contains(dbName)) {
+ continue;
+ }
+ ret.add(dbName);
+ }
+ }
+ } catch (MetaException e) {
+ String msgDesc = "Unable to get Database";
+ HadoopException hdpException = new HadoopException(msgDesc,
+ e);
+ hdpException.generateResponseDataMap(false, getMessage(e),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HiveClient.getDBListFromHM() Error : " , e);
+ }
+ throw hdpException;
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HiveClient.getDBListFromHM(): " + ret);
+ }
+ return ret;
+ }
private List<String> getDBList(String databaseMatching, List<String>dbList) throws HadoopException {
if(LOG.isDebugEnabled()) {
@@ -107,9 +167,6 @@ public class HiveClient extends BaseClient implements Closeable {
}
List<String> ret = new ArrayList<String>();
- String errMsg = " You can still save the repository and start creating "
- + "policies, but you would not be able to use autocomplete for "
- + "resource names. Check ranger_admin.log for more info.";
if (con != null) {
Statement stat = null;
ResultSet rs = null;
@@ -122,7 +179,7 @@ public class HiveClient extends BaseClient implements Closeable {
rs = stat.executeQuery(sql);
while (rs.next()) {
String dbName = rs.getString(1);
- if ( dbList != null && dbList.contains(dbName)) {
+ if (dbList != null && dbList.contains(dbName)) {
continue;
}
ret.add(rs.getString(1));
@@ -133,8 +190,8 @@ public class HiveClient extends BaseClient implements Closeable {
HadoopException hdpException = new HadoopException(msgDesc,
sqlt);
hdpException.generateResponseDataMap(false, getMessage(sqlt),
- msgDesc + errMsg, null, null);
- if(LOG.isDebugEnabled()) {
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getDBList() Error : ", sqlt);
}
throw hdpException;
@@ -143,8 +200,8 @@ public class HiveClient extends BaseClient implements Closeable {
HadoopException hdpException = new HadoopException(msgDesc,
sqle);
hdpException.generateResponseDataMap(false, getMessage(sqle),
- msgDesc + errMsg, null, null);
- if(LOG.isDebugEnabled()) {
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getDBList() Error : " , sqle);
}
throw hdpException;
@@ -155,7 +212,7 @@ public class HiveClient extends BaseClient implements Closeable {
}
- if(LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getDBList(): " + ret);
}
@@ -169,9 +226,13 @@ public class HiveClient extends BaseClient implements Closeable {
List<String> tableList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
public List<String> run() {
- List<String> ret = null;
+ List<String> ret = null;
try {
- ret = getTblList(tblNameMatching,dbList,tblList);
+ if (enableHiveMetastoreLookup) {
+ ret = getTblListFromHM(tblNameMatching,dbList,tblList);
+ } else {
+ ret = getTblList(tblNameMatching,dbList,tblList);
+ }
} catch(HadoopException he) {
LOG.error("<== HiveClient getTblList() :Unable to get the Table List", he);
throw he;
@@ -183,15 +244,44 @@ public class HiveClient extends BaseClient implements Closeable {
return tableList;
}
+ private List<String> getTblListFromHM(String tableNameMatching, List<String> dbList, List<String> tblList) throws HadoopException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> HiveClient getTblListFromHM() tableNameMatching : " + tableNameMatching + " ExcludedbList :" + dbList + "ExcludeTableList :" + tblList);
+ }
+ List<String> ret = new ArrayList<String>();
+ if (hiveClient != null && dbList != null && !dbList.isEmpty()) {
+ for (String dbName : dbList) {
+ try {
+ List<String> hiveTblList = hiveClient.getTables(dbName, tableNameMatching);
+ for (String tblName : hiveTblList) {
+ if (tblList != null && tblList.contains(tblName)) {
+ continue;
+ }
+ ret.add(tblName);
+ }
+ } catch (MetaException e) {
+ String msgDesc = "Unable to get Table.";
+ HadoopException hdpException = new HadoopException(msgDesc,e);
+ hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HiveClient.getTblListFromHM() Error : " , e);
+ }
+ throw hdpException;
+ }
+ }
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HiveClient getTblListFromHM() " + ret);
+ }
+ return ret;
+ }
+
public List<String> getTblList(String tableNameMatching, List<String> dbList, List<String> tblList) throws HadoopException {
- if(LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("==> HiveClient getTableList() tableNameMatching : " + tableNameMatching + " ExcludedbList :" + dbList + "ExcludeTableList :" + tblList);
}
List<String> ret = new ArrayList<String>();
- String errMsg = " You can still save the repository and start creating "
- + "policies, but you would not be able to use autocomplete for "
- + "resource names. Check ranger_admin.log for more info.";
if (con != null) {
Statement stat = null;
ResultSet rs = null;
@@ -200,7 +290,7 @@ public class HiveClient extends BaseClient implements Closeable {
try {
if (dbList != null && !dbList.isEmpty()) {
- for ( String db: dbList) {
+ for (String db : dbList) {
sql = "use " + db;
try {
@@ -213,13 +303,13 @@ public class HiveClient extends BaseClient implements Closeable {
}
sql = "show tables ";
- if (tableNameMatching != null && ! tableNameMatching.isEmpty()) {
+ if (tableNameMatching != null && !tableNameMatching.isEmpty()) {
sql = sql + " like \"" + tableNameMatching + "\"";
}
try {
stat = con.createStatement();
rs = stat.executeQuery(sql);
- while (rs.next()) {
+ while (rs.next()) {
String tblName = rs.getString(1);
if (tblList != null && tblList.contains(tblName)) {
continue;
@@ -240,8 +330,8 @@ public class HiveClient extends BaseClient implements Closeable {
HadoopException hdpException = new HadoopException(msgDesc,
sqlt);
hdpException.generateResponseDataMap(false, getMessage(sqlt),
- msgDesc + errMsg, null, null);
- if(LOG.isDebugEnabled()) {
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getTblList() Error : " , sqlt);
}
throw hdpException;
@@ -250,8 +340,8 @@ public class HiveClient extends BaseClient implements Closeable {
HadoopException hdpException = new HadoopException(msgDesc,
sqle);
hdpException.generateResponseDataMap(false, getMessage(sqle),
- msgDesc + errMsg, null, null);
- if(LOG.isDebugEnabled()) {
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getTblList() Error : " , sqle);
}
throw hdpException;
@@ -259,7 +349,7 @@ public class HiveClient extends BaseClient implements Closeable {
}
- if(LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient getTableList() " + ret);
}
@@ -282,11 +372,15 @@ public class HiveClient extends BaseClient implements Closeable {
final List<String> tableList = tblList;
final List<String> clmList = colList;
List<String> columnList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
- public List<String> run() {
+ public List<String> run() {
List<String> ret = null;
try {
- ret = getClmList(clmNameMatching,databaseList,tableList,clmList);
- } catch ( HadoopException he) {
+ if (enableHiveMetastoreLookup) {
+ ret = getClmListFromHM(clmNameMatching,databaseList,tableList,clmList);
+ } else {
+ ret = getClmList(clmNameMatching,databaseList,tableList,clmList);
+ }
+ } catch (HadoopException he) {
LOG.error("<== HiveClient getColumnList() :Unable to get the Column List", he);
throw he;
}
@@ -296,20 +390,62 @@ public class HiveClient extends BaseClient implements Closeable {
return columnList;
}
+ private List<String> getClmListFromHM(String columnNameMatching,List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> HiveClient.getClmListFromHM() columnNameMatching: " + columnNameMatching + " dbList :" + dbList + " tblList: " + tblList + " colList: " + colList);
+ }
+ List<String> ret = new ArrayList<String>();
+ String columnNameMatchingRegEx = null;
+
+ if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
+ columnNameMatchingRegEx = columnNameMatching;
+ }
+ if (hiveClient != null && dbList != null && !dbList.isEmpty() && tblList != null && !tblList.isEmpty()) {
+ for (String db : dbList) {
+ for (String tbl : tblList) {
+ try {
+ List<FieldSchema> hiveSch = hiveClient.getFields(db, tbl);
+ for (FieldSchema sch : hiveSch) {
+ String columnName = sch.getName();
+ if (colList != null && colList.contains(columnName)) {
+ continue;
+ }
+ if (columnNameMatchingRegEx == null) {
+ ret.add(columnName);
+ }
+ else if (FilenameUtils.wildcardMatch(columnName, columnNameMatchingRegEx)) {
+ ret.add(columnName);
+ }
+ }
+ } catch (TException e) {
+ String msgDesc = "Unable to get Columns.";
+ HadoopException hdpException = new HadoopException(msgDesc, e);
+ hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HiveClient.getClmListFromHM() Error : " ,e);
+ }
+ throw hdpException;
+ }
+ }
+ }
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HiveClient.getClmListFromHM() " + ret );
+ }
+ return ret;
+ }
+
public List<String> getClmList(String columnNameMatching,List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList + " tblList: " + tblList + " colList: " + colList);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList + " tblList: " + tblList + " colList: " + colList);
}
List<String> ret = new ArrayList<String>();
- String errMsg = " You can still save the repository and start creating "
- + "policies, but you would not be able to use autocomplete for "
- + "resource names. Check ranger_admin.log for more info.";
if (con != null) {
String columnNameMatchingRegEx = null;
- if (columnNameMatching != null && ! columnNameMatching.isEmpty()) {
+ if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
columnNameMatchingRegEx = columnNameMatching;
}
@@ -320,8 +456,8 @@ public class HiveClient extends BaseClient implements Closeable {
if (dbList != null && !dbList.isEmpty() &&
tblList != null && !tblList.isEmpty()) {
- for (String db: dbList) {
- for(String tbl:tblList) {
+ for (String db : dbList) {
+ for (String tbl : tblList) {
try {
sql = "use " + db;
@@ -355,8 +491,8 @@ public class HiveClient extends BaseClient implements Closeable {
HadoopException hdpException = new HadoopException(msgDesc,
sqlt);
hdpException.generateResponseDataMap(false, getMessage(sqlt),
- msgDesc + errMsg, null, null);
- if(LOG.isDebugEnabled()) {
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getClmList() Error : " ,sqlt);
}
throw hdpException;
@@ -365,8 +501,8 @@ public class HiveClient extends BaseClient implements Closeable {
HadoopException hdpException = new HadoopException(msgDesc,
sqle);
hdpException.generateResponseDataMap(false, getMessage(sqle),
- msgDesc + errMsg, null, null);
- if(LOG.isDebugEnabled()) {
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getClmList() Error : " ,sqle);
}
throw hdpException;
@@ -379,7 +515,7 @@ public class HiveClient extends BaseClient implements Closeable {
}
}
- if(LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveClient.getClmList() " + ret );
}
@@ -437,123 +573,174 @@ public class HiveClient extends BaseClient implements Closeable {
private void initConnection(String userName, String password) throws HadoopException {
-
- Properties prop = getConfigHolder().getRangerSection();
- String driverClassName = prop.getProperty("jdbc.driverClassName");
- String url = prop.getProperty("jdbc.url");
- String errMsg = " You can still save the repository and start creating "
- + "policies, but you would not be able to use autocomplete for "
- + "resource names. Check ranger_admin.log for more info.";
-
- if (driverClassName != null) {
+ if (enableHiveMetastoreLookup) {
try {
- Driver driver = (Driver)Class.forName(driverClassName).newInstance();
- DriverManager.registerDriver(driver);
- } catch (SQLException e) {
- String msgDesc = "initConnection: Caught SQLException while registering "
- + "Hive driver, so Unable to connect to Hive Thrift Server instance.";
- HadoopException hdpException = new HadoopException(msgDesc, e);
- hdpException.generateResponseDataMap(false, getMessage(e),
- msgDesc + errMsg, null, null);
- if ( LOG.isDebugEnabled()) {
+ HiveConf conf = new HiveConf();
+ if (!StringUtils.isEmpty(hiveSiteFilePath)) {
+ File f = new File(hiveSiteFilePath);
+ if (f.exists()) {
+ conf.addResource(f.toURI().toURL());
+ } else {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Hive site conf file path " + hiveSiteFilePath + " does not exists for Hive Metastore lookup");
+ }
+ }
+ } else {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Hive site conf file path property not found for Hive Metastore lookup");
+ }
+ }
+ hiveClient = new HiveMetaStoreClient(conf);
+ } catch (HadoopException he) {
+ String msgDesc = "initConnection: Class or its nullary constructor might not accessible."
+ + "So unable to initiate connection to hive thrift server instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, he);
+ hdpException.generateResponseDataMap(false, getMessage(he),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug(msgDesc, hdpException);
}
throw hdpException;
- } catch (IllegalAccessException ilae) {
- String msgDesc = "initConnection: Class or its nullary constructor might not accessible."
+ } catch (MalformedURLException e) {
+ String msgDesc = "initConnection: URL might be malformed."
+ "So unable to initiate connection to hive thrift server instance.";
- HadoopException hdpException = new HadoopException(msgDesc, ilae);
- hdpException.generateResponseDataMap(false, getMessage(ilae),
- msgDesc + errMsg, null, null);
- if ( LOG.isDebugEnabled()) {
+ HadoopException hdpException = new HadoopException(msgDesc, e);
+ hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug(msgDesc, hdpException);
}
throw hdpException;
- } catch (InstantiationException ie) {
- String msgDesc = "initConnection: Class may not have its nullary constructor or "
- + "may be the instantiation fails for some other reason."
+ } catch (MetaException e) {
+ String msgDesc = "initConnection: Meta info is not proper."
+ "So unable to initiate connection to hive thrift server instance.";
- HadoopException hdpException = new HadoopException(msgDesc, ie);
- hdpException.generateResponseDataMap(false, getMessage(ie),
- msgDesc + errMsg, null, null);
- if ( LOG.isDebugEnabled()) {
+ HadoopException hdpException = new HadoopException(msgDesc, e);
+ hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug(msgDesc, hdpException);
}
throw hdpException;
+ } catch (Throwable t) {
+ String msgDesc = "Unable to connect to Hive Thrift Server instance";
+ HadoopException hdpException = new HadoopException(msgDesc, t);
+ hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(msgDesc, hdpException);
+ }
+ throw hdpException;
+ }
+ } else {
+ Properties prop = getConfigHolder().getRangerSection();
+ String driverClassName = prop.getProperty("jdbc.driverClassName");
+ String url = prop.getProperty("jdbc.url");
+
+ if (driverClassName != null) {
+ try {
+ Driver driver = (Driver)Class.forName(driverClassName).newInstance();
+ DriverManager.registerDriver(driver);
+ } catch (SQLException e) {
+ String msgDesc = "initConnection: Caught SQLException while registering "
+ + "Hive driver, so Unable to connect to Hive Thrift Server instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, e);
+ hdpException.generateResponseDataMap(false, getMessage(e),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(msgDesc, hdpException);
+ }
+ throw hdpException;
+ } catch (IllegalAccessException ilae) {
+ String msgDesc = "initConnection: Class or its nullary constructor might not accessible."
+ + "So unable to initiate connection to hive thrift server instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, ilae);
+ hdpException.generateResponseDataMap(false, getMessage(ilae),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(msgDesc, hdpException);
+ }
+ throw hdpException;
+ } catch (InstantiationException ie) {
+ String msgDesc = "initConnection: Class may not have its nullary constructor or "
+ + "may be the instantiation fails for some other reason."
+ + "So unable to initiate connection to hive thrift server instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, ie);
+ hdpException.generateResponseDataMap(false, getMessage(ie),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(msgDesc, hdpException);
+ }
+ throw hdpException;
+ } catch (ExceptionInInitializerError eie) {
+ String msgDesc = "initConnection: Got ExceptionInInitializerError, "
+ + "The initialization provoked by this method fails."
+ + "So unable to initiate connection to hive thrift server instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, eie);
+ hdpException.generateResponseDataMap(false, getMessage(eie),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(msgDesc, hdpException);
+ }
+ throw hdpException;
+ } catch (SecurityException se) {
+ String msgDesc = "initConnection: unable to initiate connection to hive thrift server instance,"
+ + " The caller's class loader is not the same as or an ancestor "
+ + "of the class loader for the current class and invocation of "
+ + "s.checkPackageAccess() denies access to the package of this class.";
+ HadoopException hdpException = new HadoopException(msgDesc, se);
+ hdpException.generateResponseDataMap(false, getMessage(se),
+ msgDesc + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(msgDesc, hdpException);
+ }
+ throw hdpException;
+ } catch (Throwable t) {
+ String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, "
+ + "please provide valid value of field : {jdbc.driverClassName}.";
+ HadoopException hdpException = new HadoopException(msgDesc, t);
+ hdpException.generateResponseDataMap(false, getMessage(t),
+ msgDesc + ERR_MSG, null, "jdbc.driverClassName");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(msgDesc, hdpException);
+ }
+ throw hdpException;
+ }
+ }
+
+ try {
- } catch (ExceptionInInitializerError eie) {
- String msgDesc = "initConnection: Got ExceptionInInitializerError, "
- + "The initialization provoked by this method fails."
- + "So unable to initiate connection to hive thrift server instance.";
- HadoopException hdpException = new HadoopException(msgDesc, eie);
- hdpException.generateResponseDataMap(false, getMessage(eie),
- msgDesc + errMsg, null, null);
- if ( LOG.isDebugEnabled()) {
+ if (userName == null && password == null) {
+ con = DriverManager.getConnection(url);
+ }
+ else {
+ con = DriverManager.getConnection(url, userName, password);
+ }
+ } catch (SQLException e) {
+ String msgDesc = "Unable to connect to Hive Thrift Server instance.";
+ HadoopException hdpException = new HadoopException(msgDesc, e);
+ hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
+ + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug(msgDesc, hdpException);
}
throw hdpException;
} catch (SecurityException se) {
- String msgDesc = "initConnection: unable to initiate connection to hive thrift server instance,"
- + " The caller's class loader is not the same as or an ancestor "
- + "of the class loader for the current class and invocation of "
- + "s.checkPackageAccess() denies access to the package of this class.";
+ String msgDesc = "Unable to connect to Hive Thrift Server instance.";
HadoopException hdpException = new HadoopException(msgDesc, se);
- hdpException.generateResponseDataMap(false, getMessage(se),
- msgDesc + errMsg, null, null);
- if ( LOG.isDebugEnabled()) {
+ hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
+ + ERR_MSG, null, null);
+ if (LOG.isDebugEnabled()) {
LOG.debug(msgDesc, hdpException);
}
throw hdpException;
- } catch (Throwable t) {
- String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, "
- + "please provide valid value of field : {jdbc.driverClassName}.";
+ } catch (Throwable t) {
+ String msgDesc = "Unable to connect to Hive Thrift Server instance";
HadoopException hdpException = new HadoopException(msgDesc, t);
hdpException.generateResponseDataMap(false, getMessage(t),
- msgDesc + errMsg, null, "jdbc.driverClassName");
- if ( LOG.isDebugEnabled()) {
+ msgDesc + ERR_MSG, null, url);
+ if (LOG.isDebugEnabled()) {
LOG.debug(msgDesc, hdpException);
}
- throw hdpException;
+ throw hdpException;
}
}
-
- try {
-
- if (userName == null && password == null) {
- con = DriverManager.getConnection(url);
- }
- else {
- con = DriverManager.getConnection(url, userName, password);
- }
-
- } catch (SQLException e) {
- String msgDesc = "Unable to connect to Hive Thrift Server instance.";
- HadoopException hdpException = new HadoopException(msgDesc, e);
- hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
- + errMsg, null, null);
- if ( LOG.isDebugEnabled()) {
- LOG.debug(msgDesc, hdpException);
- }
- throw hdpException;
- } catch (SecurityException se) {
- String msgDesc = "Unable to connect to Hive Thrift Server instance.";
- HadoopException hdpException = new HadoopException(msgDesc, se);
- hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
- + errMsg, null, null);
- if ( LOG.isDebugEnabled()) {
- LOG.debug(msgDesc, hdpException);
- }
- throw hdpException;
- } catch ( Throwable t) {
- String msgDesc = "Unable to connect to Hive Thrift Server instance";
- HadoopException hdpException = new HadoopException(msgDesc, t);
- hdpException.generateResponseDataMap(false, getMessage(t),
- msgDesc + errMsg, null, url);
- if ( LOG.isDebugEnabled()) {
- LOG.debug(msgDesc, hdpException);
- }
- throw hdpException;
- }
}
@@ -581,7 +768,7 @@ public class HiveClient extends BaseClient implements Closeable {
}
else {
if (CollectionUtils.isNotEmpty(dbList)) {
- for (String str : dbList ) {
+ for (String str : dbList) {
System.out.println("database: " + str );
}
}
@@ -593,7 +780,7 @@ public class HiveClient extends BaseClient implements Closeable {
System.out.println("No tables found under database[" + args[1] + "] with table filter [" + args[2] + "]");
}
else {
- for(String str : tableList) {
+ for (String str : tableList) {
System.out.println("Table: " + str);
}
}
@@ -604,7 +791,7 @@ public class HiveClient extends BaseClient implements Closeable {
System.out.println("No columns found for db:" + args[1] + ", table: [" + args[2] + "], with column filter [" + args[3] + "]");
}
else {
- for (String str : columnList ) {
+ for (String str : columnList) {
System.out.println("Column: " + str);
}
}
@@ -625,9 +812,6 @@ public class HiveClient extends BaseClient implements Closeable {
HiveClient connectionObj = null;
Map<String, Object> responseData = new HashMap<String, Object>();
boolean connectivityStatus = false;
- String errMsg = " You can still save the repository and start creating "
- + "policies, but you would not be able to use autocomplete for "
- + "resource names. Check ranger_admin.log for more info.";
List<String> testResult = null;
try {
connectionObj = new HiveClient(serviceName, connectionProperties);
@@ -642,14 +826,14 @@ public class HiveClient extends BaseClient implements Closeable {
null, null, responseData);
} else {
String failureMsg = "Unable to retrieve any databases using given parameters.";
- generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
+ generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + ERR_MSG,
null, null, responseData);
}
}
- } catch ( Exception e) {
+ } catch (Exception e) {
throw e;
} finally {
- if ( connectionObj != null) {
+ if (connectionObj != null) {
connectionObj.close();
}
}
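Taken together, the new getDBListFromHM/getTblListFromHM/getClmListFromHM methods reduce each
lookup to a single metastore call instead of a JDBC round trip per statement. A minimal sketch
of the client setup and lookup calls used above, assuming an illustrative hive-site.xml path
and the "default" database:

    // Sketch only: mirrors the metastore path added in initConnection() above.
    HiveConf conf = new HiveConf();
    File hiveSite = new File("/etc/hive/conf/hive-site.xml"); // assumed location
    if (hiveSite.exists()) {
        conf.addResource(hiveSite.toURI().toURL());
    }
    HiveMetaStoreClient hms = new HiveMetaStoreClient(conf);
    List<String> dbs = hms.getAllDatabases();                      // database lookup
    List<String> tbls = hms.getTables("default", "*");             // table lookup
    List<FieldSchema> cols = hms.getFields("default", "my_table"); // column lookup
    hms.close();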
http://git-wip-us.apache.org/repos/asf/ranger/blob/6c0b0625/security-admin/pom.xml
----------------------------------------------------------------------
diff --git a/security-admin/pom.xml b/security-admin/pom.xml
index 77f2b25..fd387a8 100644
--- a/security-admin/pom.xml
+++ b/security-admin/pom.xml
@@ -228,6 +228,11 @@
<version>${sun.jersey.core.version}</version>
</dependency>
<dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-spring</artifactId>
<version>${sun.jersey.spring.version}</version>
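Note: the hadoop-mapreduce-client-core dependency is presumably required because HiveConf
references MapReduce configuration classes (e.g. org.apache.hadoop.mapred.JobConf) that were
not previously on the security-admin classpath; it reuses the build's existing
${hadoop.version} property.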
http://git-wip-us.apache.org/repos/asf/ranger/blob/6c0b0625/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
----------------------------------------------------------------------
diff --git a/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js b/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
index dcc85ab..961d5d8 100644
--- a/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
+++ b/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
@@ -57,10 +57,12 @@ define(function(require) {
onRender : function() {
},
onInputNameChange : function(e) {
- this.model.set('name', $(e.currentTarget).val());
+ this.model.set('name', $(e.currentTarget).val().trim());
+ this.ui.name.val($(e.currentTarget).val().trim());
},
onInputValueChange : function(e) {
- this.model.set('value', $(e.currentTarget).val());
+ this.model.set('value', $(e.currentTarget).val().trim());
+ this.ui.value.val($(e.currentTarget).val().trim());
},
evDelete : function(){
var that = this;
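The ConfigurationList.js change trims the configuration name and value as they are typed, so
properties entered through the UI (for example hive.site.file.path above) cannot carry stray
leading or trailing whitespace into file-path resolution or boolean parsing.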