You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2015/04/27 19:12:27 UTC
ambari git commit: AMBARI-10773. Hive Ranger plugin fails to install
with non-root agent (aonishuk)
Repository: ambari
Updated Branches:
refs/heads/trunk 52705c266 -> f13d983dd
AMBARI-10773. Hive Ranger plugin fails to install with non-root agent (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f13d983d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f13d983d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f13d983d
Branch: refs/heads/trunk
Commit: f13d983dd0d27ce1c9e25447c9eea11ea72eceff
Parents: 52705c2
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Apr 27 20:12:19 2015 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Apr 27 20:12:19 2015 +0300
----------------------------------------------------------------------
.../libraries/functions/__init__.py | 1 +
.../libraries/functions/ranger_functions.py | 4 +-
.../libraries/functions/setup_ranger_plugin.py | 76 ++++++++
.../0.96.0.2.0/package/scripts/params_linux.py | 15 +-
.../package/scripts/setup_ranger_hbase.py | 54 +-----
.../2.1.0.2.0/package/scripts/params_linux.py | 17 +-
.../package/scripts/setup_ranger_hdfs.py | 54 +-----
.../0.12.0.2.0/package/scripts/params_linux.py | 123 ++++++------
.../package/scripts/setup_ranger_hive.py | 192 +------------------
.../ranger-hive-plugin-properties.xml | 48 +++++
.../python/stacks/2.0.6/configs/default.json | 61 +++++-
.../stacks/2.0.6/configs/default_client.json | 33 +++-
.../2.0.6/configs/default_no_install.json | 29 ++-
.../python/stacks/2.0.6/configs/secured.json | 60 +++++-
.../stacks/2.0.6/configs/secured_client.json | 33 +++-
.../test/python/stacks/2.1/configs/default.json | 30 +++
.../test/python/stacks/2.1/configs/secured.json | 30 +++
.../python/stacks/2.2/configs/hive-upgrade.json | 31 ++-
18 files changed, 506 insertions(+), 385 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index f4822ce..e06d246 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -41,6 +41,7 @@ from resource_management.libraries.functions.constants import *
from resource_management.libraries.functions.get_hdp_version import *
from resource_management.libraries.functions.get_lzo_packages import *
from resource_management.libraries.functions.dynamic_variable_interpretation import *
+from resource_management.libraries.functions.setup_ranger_plugin import *
IS_WINDOWS = platform.system() == "Windows"
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py
index e47f1dc..63e5df1 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/ranger_functions.py
@@ -23,8 +23,8 @@ from StringIO import StringIO as BytesIO
import json
from resource_management.core.logger import Logger
import urllib2, base64, httplib
-from resource_management import *
-
+from resource_management.core.exceptions import Fail
+from resource_management.libraries.functions.format import format
class Rangeradmin:
sInstance = None
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin.py b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin.py
new file mode 100644
index 0000000..da5c559
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+__all__ = ["setup_ranger_plugin"]
+
+import os
+from resource_management.libraries.functions.ranger_functions import Rangeradmin
+from resource_management.core.resources import File
+from resource_management.core.resources import Execute
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.core.logger import Logger
+from resource_management.core.source import DownloadSource
+from resource_management.libraries.resources import ModifyPropertiesFile
+from resource_management.core.exceptions import Fail
+
+def setup_ranger_plugin(component_select_name, service_name,
+ downloaded_custom_connector, driver_curl_source,
+ driver_curl_target, java_home,
+ repo_name, plugin_repo_dict,
+ ranger_env_properties, plugin_properties,
+ policy_user, policymgr_mgr_url,
+ plugin_enabled):
+ File(downloaded_custom_connector,
+ content = DownloadSource(driver_curl_source)
+ )
+
+ Execute(('cp', '--remove-destination', downloaded_custom_connector, driver_curl_target),
+ not_if=format("test -f {driver_curl_target}"),
+ sudo=True
+ )
+
+ hdp_version = get_hdp_version(component_select_name)
+ file_path = format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin/install.properties')
+
+ if not os.path.isfile(file_path):
+ raise Fail(format('Ranger {service_name} plugin install.properties file does not exist at {file_path}'))
+
+ ModifyPropertiesFile(file_path,
+ properties = plugin_properties
+ )
+
+ if plugin_enabled:
+ cmd = (format('enable-{service_name}-plugin.sh'),)
+
+ ranger_adm_obj = Rangeradmin(url=policymgr_mgr_url)
+ ranger_adm_obj.create_ranger_repository(service_name, repo_name, plugin_repo_dict,
+ ranger_env_properties['ranger_admin_username'], ranger_env_properties['ranger_admin_password'],
+ ranger_env_properties['admin_username'], ranger_env_properties['admin_password'],
+ policy_user)
+ else:
+ cmd = (format('disable-{service_name}-plugin.sh'),)
+
+ cmd_env = {'JAVA_HOME': java_home, 'PWD': format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin'), 'PATH': format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin')}
+
+ Execute(cmd,
+ environment=cmd_env,
+ logoutput=True,
+ sudo=True,
+ )
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index a9e2da6..89396c8 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -176,13 +176,7 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
hbase_cmd = format("/usr/hdp/current/hbase-{role_root}/bin/hbase")
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
- # Setting Flag value for ranger hbase plugin
- enable_ranger_hbase = False
- ranger_plugin_enable = config['configurations']['ranger-hbase-plugin-properties']['ranger-hbase-plugin-enabled']
- if ranger_plugin_enable.lower() == 'yes':
- enable_ranger_hbase = True
- elif ranger_plugin_enable.lower() == 'no':
- enable_ranger_hbase = False
+ enable_ranger_hbase = (config['configurations']['ranger-hbase-plugin-properties']['ranger-hbase-plugin-enabled'].lower() == 'yes')
# ranger host
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
@@ -211,11 +205,8 @@ hadoop_security_authentication = config['configurations']['core-site']['hadoop.s
repo_config_username = config['configurations']['ranger-hbase-plugin-properties']['REPOSITORY_CONFIG_USERNAME']
repo_config_password = config['configurations']['ranger-hbase-plugin-properties']['REPOSITORY_CONFIG_PASSWORD']
-admin_uname = config['configurations']['ranger-env']['admin_username']
-admin_password = config['configurations']['ranger-env']['admin_password']
-
-ambari_ranger_admin = config['configurations']['ranger-env']['ranger_admin_username']
-ambari_ranger_password = config['configurations']['ranger-env']['ranger_admin_password']
+ranger_env = config['configurations']['ranger-env']
+ranger_plugin_properties = config['configurations']['ranger-hbase-plugin-properties']
policy_user = config['configurations']['ranger-hbase-plugin-properties']['policy_user']
#For curl command in ranger plugin to get db connector
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
index b9c6241..e767d32 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
@@ -17,57 +17,19 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
-import sys
-import fileinput
-import subprocess
-import json
-import re
-import os
from resource_management import *
-from resource_management.libraries.functions.ranger_functions import Rangeradmin
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
def setup_ranger_hbase():
import params
if params.has_ranger_admin:
- File(params.downloaded_custom_connector,
- content = DownloadSource(params.driver_curl_source)
- )
-
- Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.driver_curl_target),
- path=["/bin", "/usr/bin/"],
- not_if=format("test -f {driver_curl_target}"),
- sudo=True)
-
- hdp_version = get_hdp_version('hbase-client')
- file_path = format('/usr/hdp/{hdp_version}/ranger-hbase-plugin/install.properties')
-
- if not os.path.isfile(file_path):
- raise Fail(format('Ranger HBase plugin install.properties file does not exist at {file_path}'))
-
- ModifyPropertiesFile(file_path,
- properties = params.config['configurations']['ranger-hbase-plugin-properties']
- )
-
- if params.enable_ranger_hbase:
- cmd = ('enable-hbase-plugin.sh',)
-
- ranger_adm_obj = Rangeradmin(url=params.policymgr_mgr_url)
- ranger_adm_obj.create_ranger_repository('hbase', params.repo_name, params.hbase_ranger_plugin_repo,
- params.ambari_ranger_admin, params.ambari_ranger_password,
- params.admin_uname, params.admin_password,
- params.policy_user)
- else:
- cmd = ('disable-hbase-plugin.sh',)
-
- cmd_env = {'JAVA_HOME': params.java64_home, 'PWD': format('/usr/hdp/{hdp_version}/ranger-hbase-plugin'), 'PATH': format('/usr/hdp/{hdp_version}/ranger-hbase-plugin')}
-
- Execute(cmd,
- environment=cmd_env,
- logoutput=True,
- sudo=True,
- )
+ setup_ranger_plugin('hbase-client', 'hbase',
+ params.downloaded_custom_connector, params.driver_curl_source,
+ params.driver_curl_target, params.java64_home,
+ params.repo_name, params.hbase_ranger_plugin_repo,
+ params.ranger_env, params.ranger_plugin_properties,
+ params.policy_user, params.policymgr_mgr_url,
+ params.enable_ranger_hbase
+ )
else:
Logger.info('Ranger admin not installed')
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 0747b18..d98c30a 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -325,13 +325,7 @@ ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
has_ranger_admin = not len(ranger_admin_hosts) == 0
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
- # setting flag value for ranger hdfs plugin
- enable_ranger_hdfs = False
- ranger_plugin_enable = default("/configurations/ranger-hdfs-plugin-properties/ranger-hdfs-plugin-enabled", "no")
- if ranger_plugin_enable.lower() == 'yes':
- enable_ranger_hdfs = True
- elif ranger_plugin_enable.lower() == 'no':
- enable_ranger_hdfs = False
+ enable_ranger_hdfs = (config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled'].lower() == 'yes')
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
@@ -359,12 +353,9 @@ if security_enabled:
sn_principal_name = default("/configurations/hdfs-site/dfs.secondary.namenode.kerberos.principal", "nn/_HOST@EXAMPLE.COM")
sn_principal_name = sn_principal_name.replace('_HOST',hostname.lower())
-admin_uname = config['configurations']['ranger-env']['admin_username']
-admin_password = config['configurations']['ranger-env']['admin_password']
-
-ambari_ranger_admin = config['configurations']['ranger-env']['ranger_admin_username']
-ambari_ranger_password = config['configurations']['ranger-env']['ranger_admin_password']
-policy_user = config['configurations']['ranger-hbase-plugin-properties']['policy_user']
+ranger_env = config['configurations']['ranger-env']
+ranger_plugin_properties = config['configurations']['ranger-hdfs-plugin-properties']
+policy_user = config['configurations']['ranger-hdfs-plugin-properties']['policy_user']
#For curl command in ranger plugin to get db connector
jdk_location = config['hostLevelParams']['jdk_location']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
index 595fc3f..e73398f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
@@ -17,57 +17,19 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
-import sys
-import fileinput
-import subprocess
-import json
-import re
-import os
from resource_management import *
-from resource_management.libraries.functions.ranger_functions import Rangeradmin
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
def setup_ranger_hdfs():
import params
if params.has_ranger_admin:
- File(params.downloaded_custom_connector,
- content = DownloadSource(params.driver_curl_source)
- )
-
- Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.driver_curl_target),
- path=["/bin", "/usr/bin/"],
- not_if=format("test -f {driver_curl_target}"),
- sudo=True)
-
- hdp_version = get_hdp_version('hadoop-client')
- file_path = format('/usr/hdp/{hdp_version}/ranger-hdfs-plugin/install.properties')
-
- if not os.path.isfile(file_path):
- raise Fail(format('Ranger HBase plugin install.properties file does not exist at {file_path}'))
-
- ModifyPropertiesFile(file_path,
- properties = params.config['configurations']['ranger-hdfs-plugin-properties']
- )
-
- if params.enable_ranger_hdfs:
- cmd = ('enable-hdfs-plugin.sh',)
-
- ranger_adm_obj = Rangeradmin(url=params.policymgr_mgr_url)
- ranger_adm_obj.create_ranger_repository('hdfs', params.repo_name, params.hdfs_ranger_plugin_repo,
- params.ambari_ranger_admin, params.ambari_ranger_password,
- params.admin_uname, params.admin_password,
- params.policy_user)
- else:
- cmd = ('disable-hdfs-plugin.sh',)
-
- cmd_env = {'JAVA_HOME': params.java_home, 'PWD': format('/usr/hdp/{hdp_version}/ranger-hdfs-plugin'), 'PATH': format('/usr/hdp/{hdp_version}/ranger-hdfs-plugin')}
-
- Execute(cmd,
- environment=cmd_env,
- logoutput=True,
- sudo=True,
- )
+ setup_ranger_plugin('hadoop-client', 'hdfs',
+ params.downloaded_custom_connector, params.driver_curl_source,
+ params.driver_curl_target, params.java_home,
+ params.repo_name, params.hdfs_ranger_plugin_repo,
+ params.ranger_env, params.ranger_plugin_properties,
+ params.policy_user, params.policymgr_mgr_url,
+ params.enable_ranger_hdfs
+ )
else:
Logger.info('Ranger admin not installed')
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 976e433..dae198e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -24,6 +24,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
from resource_management.libraries.functions.default import default
from resource_management import *
import status_params
+import json
import os
# server configurations
@@ -348,80 +349,64 @@ HdfsDirectory = functools.partial(
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
has_ranger_admin = not len(ranger_admin_hosts) == 0
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
- # setting flag value for ranger hive plugin
- enable_ranger_hive = False
- ranger_plugin_enable = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
- if ranger_plugin_enable.lower() == 'yes':
- enable_ranger_hive = True
- elif ranger_plugin_enable.lower() == 'no':
- enable_ranger_hive = False
+ enable_ranger_hive = (config['configurations']['ranger-hive-plugin-properties']['ranger-hive-plugin-enabled'].lower() == 'yes')
#ranger hive properties
-policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
-sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
-xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
-xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
-xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
-xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
-xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
+policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url']
+sql_connector_jar = config['configurations']['admin-properties']['SQL_CONNECTOR_JAR']
+xa_audit_db_flavor = config['configurations']['admin-properties']['DB_FLAVOR']
+xa_audit_db_name = config['configurations']['admin-properties']['audit_db_name']
+xa_audit_db_user = config['configurations']['admin-properties']['audit_db_user']
+xa_audit_db_password = config['configurations']['admin-properties']['audit_db_password']
+xa_db_host = config['configurations']['admin-properties']['db_host']
repo_name = str(config['clusterName']) + '_hive'
-db_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
-hdfs_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
-hdfs_dest_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
-hdfs_buffer_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
-hdfs_archive_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
-hdfs_dest_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
-hdfs_dest_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
-hdfs_dest_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
-hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
-hdfs_buffer_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
-hdfs_buffer_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
-hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
-hdfs_archive_max_file_count = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
-ssl_keystore_file = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
-ssl_keystore_password = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
-ssl_truststore_file = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
-ssl_truststore_password = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
-grant_revoke = default("/configurations/ranger-hive-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE","true")
-
-jdbc_driver_class_name = default("/configurations/ranger-hive-plugin-properties/jdbc.driverClassName","")
-common_name_for_certificate = default("/configurations/ranger-hive-plugin-properties/common.name.for.certificate", "-")
-
-repo_config_username = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hive")
-repo_config_password = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hive")
-
-admin_uname = default("/configurations/ranger-env/admin_username", "admin")
-admin_password = default("/configurations/ranger-env/admin_password", "admin")
-admin_uname_password = format("{admin_uname}:{admin_password}")
-
-ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
-ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
-policy_user = default("/configurations/ranger-hive-plugin-properties/policy_user", "ambari-qa")
+
+jdbc_driver_class_name = config['configurations']['ranger-hive-plugin-properties']['jdbc.driverClassName']
+common_name_for_certificate = config['configurations']['ranger-hive-plugin-properties']['common.name.for.certificate']
+
+repo_config_username = config['configurations']['ranger-hive-plugin-properties']['REPOSITORY_CONFIG_USERNAME']
+repo_config_password = config['configurations']['ranger-hive-plugin-properties']['REPOSITORY_CONFIG_PASSWORD']
+
+ranger_env = config['configurations']['ranger-env']
+ranger_plugin_properties = config['configurations']['ranger-hive-plugin-properties']
+policy_user = config['configurations']['ranger-hive-plugin-properties']['policy_user']
#For curl command in ranger plugin to get db connector
-if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
- ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
- ranger_jdbc_jar_name = "mysql-connector-java.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
- ranger_jdbc_jar_name = "ojdbc6.jar"
- ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'postgres':
- ranger_jdbc_jar_name = "postgresql.jar"
- ranger_jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'sqlserver':
- ranger_jdbc_jar_name = "sqljdbc4.jar"
- ranger_jdbc_symlink_name = "mssql-jdbc-driver.jar"
-
-ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
-
-ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
-ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
+if has_ranger_admin:
+ if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
+ ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
+ ranger_jdbc_jar_name = "mysql-connector-java.jar"
+ elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
+ ranger_jdbc_jar_name = "ojdbc6.jar"
+ ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"
+ elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'postgres':
+ ranger_jdbc_jar_name = "postgresql.jar"
+ ranger_jdbc_symlink_name = "postgres-jdbc-driver.jar"
+ elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'sqlserver':
+ ranger_jdbc_jar_name = "sqljdbc4.jar"
+ ranger_jdbc_symlink_name = "mssql-jdbc-driver.jar"
+
+ ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
+
+ ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
+ ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
if security_enabled:
hive_principal = hive_server_principal.replace('_HOST',hostname.lower())
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.3') >= 0:
- solr_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.SOLR.IS_ENABLED", "false")
- solr_max_queue_size = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.SOLR.MAX_QUEUE_SIZE", "1")
- solr_max_flush_interval = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS", "1000")
- solr_url = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.SOLR.SOLR_URL", "http://localhost:6083/solr/ranger_audits")
+
+hive_ranger_plugin_config = {
+ 'username': repo_config_username,
+ 'password': repo_config_password,
+ 'jdbc.driverClassName': jdbc_driver_class_name,
+ 'jdbc.url': format("{hive_url}/default;principal={hive_principal}") if security_enabled else hive_url,
+ 'commonNameForCertificate': common_name_for_certificate
+}
+
+hive_ranger_plugin_repo = {
+ 'isActive': 'true',
+ 'config': json.dumps(hive_ranger_plugin_config),
+ 'description': 'hive repo',
+ 'name': repo_name,
+ 'repositoryType': 'hive',
+ 'assetType': '3'
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
index c202241..f6b6f94 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
@@ -17,191 +17,19 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
-import sys
-import fileinput
-import subprocess
-import json
-import re
-import os
from resource_management import *
-from resource_management.libraries.functions.ranger_functions import Rangeradmin
-from resource_management.core.logger import Logger
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
def setup_ranger_hive():
import params
-
+
if params.has_ranger_admin:
- File(params.ranger_downloaded_custom_connector,
- content = DownloadSource(params.ranger_driver_curl_source),
- )
-
- if not os.path.isfile(params.ranger_driver_curl_target):
- Execute(('cp', '--remove-destination', params.ranger_downloaded_custom_connector, params.ranger_driver_curl_target),
- path=["/bin", "/usr/bin/"],
- sudo=True)
-
- try:
- command = 'hdp-select status hive-server2'
- return_code, hdp_output = shell.call(command, timeout=20)
- except Exception, e:
- Logger.error(str(e))
- raise Fail('Unable to execute hdp-select command to retrieve the version.')
-
- if return_code != 0:
- raise Fail('Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
-
- hdp_version = re.sub('hive-server2 - ', '', hdp_output).strip()
- match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
-
- if match is None:
- raise Fail('Failed to get extracted version')
-
- file_path = '/usr/hdp/'+ hdp_version +'/ranger-hive-plugin/install.properties'
- if not os.path.isfile(file_path):
- raise Fail('Ranger Hive plugin install.properties file does not exist at {0}'.format(file_path))
-
- ranger_hive_dict = ranger_hive_properties()
- hive_repo_data = hive_repo_properties()
-
- write_properties_to_file(file_path, ranger_hive_dict)
-
- if params.enable_ranger_hive:
- cmd = format('cd /usr/hdp/{hdp_version}/ranger-hive-plugin/ && sh enable-hive-plugin.sh')
- ranger_adm_obj = Rangeradmin(url=ranger_hive_dict['POLICY_MGR_URL'])
- response_code, response_recieved = ranger_adm_obj.check_ranger_login_urllib2(ranger_hive_dict['POLICY_MGR_URL'] + '/login.jsp', 'test:test')
-
- if response_code is not None and response_code == 200:
- ambari_ranger_admin, ambari_ranger_password = ranger_adm_obj.create_ambari_admin_user(params.ambari_ranger_admin, params.ambari_ranger_password, params.admin_uname_password)
- ambari_username_password_for_ranger = ambari_ranger_admin + ':' + ambari_ranger_password
- if ambari_ranger_admin != '' and ambari_ranger_password != '':
- repo = ranger_adm_obj.get_repository_by_name_urllib2(ranger_hive_dict['REPOSITORY_NAME'], 'hive', 'true', ambari_username_password_for_ranger)
-
- if repo and repo['name'] == ranger_hive_dict['REPOSITORY_NAME']:
- Logger.info('Hive Repository exist')
- else:
- response = ranger_adm_obj.create_repository_urllib2(hive_repo_data,ambari_username_password_for_ranger, params.policy_user)
- if response is not None:
- Logger.info('Hive Repository created in Ranger Admin')
- else:
- Logger.info('Hive Repository creation failed in Ranger Admin')
- else:
- Logger.info('Ambari admin username and password are blank ')
- else:
- Logger.info('Ranger service is not started on given host')
- else:
- cmd = format('cd /usr/hdp/{hdp_version}/ranger-hive-plugin/ && sh disable-hive-plugin.sh')
-
- Execute(cmd, environment={'JAVA_HOME': params.java64_home}, logoutput=True)
+ setup_ranger_plugin('hive-server2', 'hive',
+ params.downloaded_custom_connector, params.driver_curl_source,
+ params.driver_curl_target, params.java64_home,
+ params.repo_name, params.hive_ranger_plugin_repo,
+ params.ranger_env, params.ranger_plugin_properties,
+ params.policy_user, params.policymgr_mgr_url,
+ params.enable_ranger_hive
+ )
else:
- Logger.info('Ranger admin not installed')
-
-
-def write_properties_to_file(file_path, value):
- for key in value:
- modify_config(file_path, key, value[key])
-
-
-def modify_config(filepath, variable, setting):
- var_found = False
- already_set = False
- V=str(variable)
- S=str(setting)
- # use quotes if setting has spaces #
- if ' ' in S:
- S = '%s' % S
-
- for line in fileinput.input(filepath, inplace = 1):
- # process lines that look like config settings #
- if not line.lstrip(' ').startswith('#') and '=' in line:
- _infile_var = str(line.split('=')[0].rstrip(' '))
- _infile_set = str(line.split('=')[1].lstrip(' ').rstrip())
- # only change the first matching occurrence #
- if var_found == False and _infile_var.rstrip(' ') == V:
- var_found = True
- # don't change it if it is already set #
- if _infile_set.lstrip(' ') == S:
- already_set = True
- else:
- line = "%s=%s\n" % (V, S)
-
- sys.stdout.write(line)
-
- # Append the variable if it wasn't found #
- if not var_found:
- with open(filepath, "a") as f:
- f.write("%s=%s\n" % (V, S))
- elif already_set == True:
- pass
- else:
- pass
-
- return
-
-def ranger_hive_properties():
- import params
-
- ranger_hive_properties = dict()
-
- ranger_hive_properties['POLICY_MGR_URL'] = params.policymgr_mgr_url
- ranger_hive_properties['SQL_CONNECTOR_JAR'] = params.sql_connector_jar
- ranger_hive_properties['XAAUDIT.DB.FLAVOUR'] = params.xa_audit_db_flavor
- ranger_hive_properties['XAAUDIT.DB.DATABASE_NAME'] = params.xa_audit_db_name
- ranger_hive_properties['XAAUDIT.DB.USER_NAME'] = params.xa_audit_db_user
- ranger_hive_properties['XAAUDIT.DB.PASSWORD'] = params.xa_audit_db_password
- ranger_hive_properties['XAAUDIT.DB.HOSTNAME'] = params.xa_db_host
- ranger_hive_properties['REPOSITORY_NAME'] = params.repo_name
- ranger_hive_properties['XAAUDIT.DB.IS_ENABLED'] = params.db_enabled
-
- ranger_hive_properties['XAAUDIT.HDFS.IS_ENABLED'] = params.hdfs_enabled
- ranger_hive_properties['XAAUDIT.HDFS.DESTINATION_DIRECTORY'] = params.hdfs_dest_dir
- ranger_hive_properties['XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY'] = params.hdfs_buffer_dir
- ranger_hive_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY'] = params.hdfs_archive_dir
- ranger_hive_properties['XAAUDIT.HDFS.DESTINTATION_FILE'] = params.hdfs_dest_file
- ranger_hive_properties['XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS'] = params.hdfs_dest_flush_int_sec
- ranger_hive_properties['XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_dest_rollover_int_sec
- ranger_hive_properties['XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS'] = params.hdfs_dest_open_retry_int_sec
- ranger_hive_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FILE'] = params.hdfs_buffer_file
- ranger_hive_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS'] = params.hdfs_buffer_flush_int_sec
- ranger_hive_properties['XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_buffer_rollover_int_sec
- ranger_hive_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT'] = params.hdfs_archive_max_file_count
-
- ranger_hive_properties['SSL_KEYSTORE_FILE_PATH'] = params.ssl_keystore_file
- ranger_hive_properties['SSL_KEYSTORE_PASSWORD'] = params.ssl_keystore_password
- ranger_hive_properties['SSL_TRUSTSTORE_FILE_PATH'] = params.ssl_truststore_file
- ranger_hive_properties['SSL_TRUSTSTORE_PASSWORD'] = params.ssl_truststore_password
-
- ranger_hive_properties['UPDATE_XAPOLICIES_ON_GRANT_REVOKE'] = params.grant_revoke
-
- if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.3') >= 0:
- ranger_hive_properties['XAAUDIT.SOLR.IS_ENABLED'] = str(params.solr_enabled).lower()
- ranger_hive_properties['XAAUDIT.SOLR.MAX_QUEUE_SIZE'] = params.solr_max_queue_size
- ranger_hive_properties['XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS'] = params.solr_max_flush_interval
- ranger_hive_properties['XAAUDIT.SOLR.SOLR_URL'] = params.solr_url
-
- return ranger_hive_properties
-
-def hive_repo_properties():
- import params
-
- config_dict = dict()
- config_dict['username'] = params.repo_config_username
- config_dict['password'] = params.repo_config_password
- config_dict['jdbc.driverClassName'] = params.jdbc_driver_class_name
- if params.security_enabled:
- config_dict['jdbc.url'] = format("{params.hive_url}/default;principal={params.hive_principal}")
- else:
- config_dict['jdbc.url'] = params.hive_url
- config_dict['commonNameForCertificate'] = params.common_name_for_certificate
-
- repo= dict()
- repo['isActive'] = "true"
- repo['config'] = json.dumps(config_dict)
- repo['description'] = "hive repo"
- repo['name'] = params.repo_name
- repo['repositoryType'] = "hive"
- repo['assetType'] = '3'
-
- data = json.dumps(repo)
-
- return data
+ Logger.info('Ranger admin not installed')
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/ranger-hive-plugin-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/ranger-hive-plugin-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/ranger-hive-plugin-properties.xml
index 2110157..f893020 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/ranger-hive-plugin-properties.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/ranger-hive-plugin-properties.xml
@@ -163,6 +163,54 @@
<name>UPDATE_XAPOLICIES_ON_GRANT_REVOKE</name>
<value>true</value>
<description></description>
+ </property>
+
+ <property>
+ <name>POLICY_MGR_URL</name>
+ <value>{{policymgr_mgr_url}}</value>
+ <description>Policy Manager url</description>
+ </property>
+
+ <property>
+ <name>SQL_CONNECTOR_JAR</name>
+ <value>{{sql_connector_jar}}</value>
+ <description>Location of DB client library (please check the location of the jar file)</description>
+ </property>
+
+ <property>
+ <name>XAAUDIT.DB.FLAVOUR</name>
+ <value>{{xa_audit_db_flavor}}</value>
+ <description>The database type to be used (mysql/oracle)</description>
+ </property>
+
+ <property>
+ <name>XAAUDIT.DB.DATABASE_NAME</name>
+ <value>{{xa_audit_db_name}}</value>
+ <description>Audit database name</description>
</property>
+
+ <property>
+ <name>XAAUDIT.DB.USER_NAME</name>
+ <value>{{xa_audit_db_user}}</value>
+ <description>Audit database user</description>
+ </property>
+
+ <property>
+ <name>XAAUDIT.DB.PASSWORD</name>
+ <value>{{xa_audit_db_password}}</value>
+ <description>Audit database password</description>
+ </property>
+
+ <property>
+ <name>XAAUDIT.DB.HOSTNAME</name>
+ <value>{{xa_db_host}}</value>
+ <description>Audit database hostname</description>
+ </property>
+
+ <property>
+ <name>REPOSITORY_NAME</name>
+ <value>{{repo_name}}</value>
+ <description>Ranger repository name</description>
+ </property>
</configuration>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index d4ee460..636047e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -376,11 +376,64 @@
"hive.optimize.mapjoin.mapreduce": "true",
"hive.exec.scratchdir" : "/custompath/tmp/hive"
},
- "ranger-hive-plugin-properties": {
- "ranger-hive-plugin-enabled":"yes"
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
},
- "ranger-knox-plugin-properties": {
- "ranger-knox-plugin-enabled":"yes"
+ "ranger-knox-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "KNOX_HOME": "/usr/hdp/current/knox-server",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "ranger-knox-plugin-enabled": "No",
+ "policy_user": "ambari-qa",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "REPOSITORY_CONFIG_USERNAME": "admin",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "admin-password",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
},
"yarn-site": {
"yarn.nodemanager.disk-health-checker.min-healthy-disks": "0.25",
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.0.6/configs/default_client.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default_client.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default_client.json
index f65fc64..c4f5527 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default_client.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default_client.json
@@ -199,6 +199,36 @@
"storm.thrift.transport": "backtype.storm.security.auth.SimpleTransportPlugin",
"logviewer.port": "8000",
"topology.debug": "false"
+ },
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
},
"webhcat-site": {
"templeton.pig.path": "pig.tar.gz/pig/bin/pig",
@@ -332,9 +362,6 @@
"ipc.client.connection.maxidletime": "30000",
"ipc.client.connect.max.retries": "50"
},
- "ranger-hive-plugin-properties" : {
- "ranger-hive-plugin-enabled":"yes"
- },
"hive-site": {
"hive.enforce.sorting": "true",
"javax.jdo.option.ConnectionPassword": "!`\"' 1",
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json
index 752af67..ac95268 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json
@@ -379,7 +379,34 @@
"hive.exec.scratchdir" : "/custompath/tmp/hive"
},
"ranger-hive-plugin-properties": {
- "ranger-hive-plugin-enabled":"yes"
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
},
"ranger-knox-plugin-properties": {
"ranger-knox-plugin-enabled":"yes"
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index 7ce2bd5..3faf7a1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -374,8 +374,64 @@
"ranger-hdfs-plugin-properties" : {
"ranger-hdfs-plugin-enabled":"yes"
},
- "ranger-hive-plugin-properties" : {
- "ranger-hive-plugin-enabled":"yes"
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+ },
+ "ranger-knox-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "KNOX_HOME": "/usr/hdp/current/knox-server",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "ranger-knox-plugin-enabled": "No",
+ "policy_user": "ambari-qa",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "REPOSITORY_CONFIG_USERNAME": "admin",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "admin-password",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
},
"hive-site": {
"hive.enforce.sorting": "true",
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.0.6/configs/secured_client.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured_client.json b/ambari-server/src/test/python/stacks/2.0.6/configs/secured_client.json
index 0d809e1..8971c59 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured_client.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured_client.json
@@ -77,6 +77,36 @@
"mapreduce.jobhistory.webapp.spnego-principal": "HTTP/_HOST@EXAMPLE.COM",
"yarn.app.mapreduce.am.command-opts": "-Xmx546m",
"mapreduce.admin.map.child.java.opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN"
+ },
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
},
"oozie-site": {
"oozie.service.PurgeService.purge.interval": "3600",
@@ -367,9 +397,6 @@
"hadoop.proxyuser.oozie.hosts": "c6402.ambari.apache.org",
"ipc.client.connection.maxidletime": "30000"
},
- "ranger-hive-plugin-properties" : {
- "ranger-hive-plugin-enabled":"yes"
- },
"hive-site": {
"hive.enforce.sorting": "true",
"javax.jdo.option.ConnectionPassword": "!`\"' 1",
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.1/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/default.json b/ambari-server/src/test/python/stacks/2.1/configs/default.json
index 050dd92..722c402 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/default.json
@@ -675,6 +675,36 @@
},
"zookeeper-log4j": {
"property1": "value1"
+ },
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
}
},
"configuration_attributes": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.1/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/secured.json b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
index 699d53b..6fc5540 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
@@ -661,6 +661,36 @@
},
"zookeeper-log4j": {
"property1": "value1"
+ },
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
}
},
"configuration_attributes": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/f13d983d/ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json
index 1e3fcc7..c4d7f6b 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json
@@ -391,8 +391,35 @@
"mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
"pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
},
- "ranger-hive-plugin-properties" : {
- "ranger-hive-plugin-enabled":"yes"
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
}
},
"configurationTags": {