Posted to commits@ambari.apache.org by al...@apache.org on 2015/03/19 22:20:47 UTC
ambari git commit: AMBARI-9224. Add HDFS-NFS gateway as a new component to HDFS in Ambari stack (Brandon Li via alejandro)
Repository: ambari
Updated Branches:
refs/heads/trunk ec5cc9d1a -> c217b8d8b
AMBARI-9224. Add HDFS-NFS gateway as a new component to HDFS in Ambari stack (Brandon Li via alejandro)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c217b8d8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c217b8d8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c217b8d8
Branch: refs/heads/trunk
Commit: c217b8d8bd4088108b485af87891155e785d9a57
Parents: ec5cc9d
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Thu Mar 19 14:20:30 2015 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Mar 19 14:20:30 2015 -0700
----------------------------------------------------------------------
.../common-services/HDFS/2.1.0.2.0/metainfo.xml | 22 ++
.../package/scripts/hdfs_nfsgateway.py | 42 +++
.../2.1.0.2.0/package/scripts/nfsgateway.py | 131 +++++++
.../2.1.0.2.0/package/scripts/status_params.py | 1 +
.../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 360 +++++++++++++++++++
ambari-web/app/data/HDP2/config_mapping.js | 17 +-
6 files changed, 572 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
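For context: the new NFS_GATEWAY component runs Hadoop's nfs3 daemon, which exposes HDFS over NFSv3 so that clients can mount it like a local file system. Per the Hadoop NFS gateway documentation, a client mount typically looks like: mount -t nfs -o vers=3,proto=tcp,nolock <gateway-host>:/ <mount-point>.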
http://git-wip-us.apache.org/repos/asf/ambari/blob/c217b8d8/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
index ba9688a..916d9b0 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
@@ -155,6 +155,28 @@
</component>
+ <component>
+ <name>NFS_GATEWAY</name>
+ <displayName>NFSGateway</displayName>
+ <cardinality>0+</cardinality>
+ <versionAdvertised>false</versionAdvertised>
+ <category>SLAVE</category>
+ <commandScript>
+ <script>scripts/nfsgateway.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>1200</timeout>
+ </commandScript>
+ <dependencies>
+ <dependency>
+ <name>HDFS/HDFS_CLIENT</name>
+ <scope>host</scope>
+ <auto-deploy>
+ <enabled>true</enabled>
+ </auto-deploy>
+ </dependency>
+ </dependencies>
+ </component>
+
</components>
<osSpecifics>
<osSpecific>
<osFamily>any</osFamily>
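Because the component's cardinality is 0+ with an auto-deployed HDFS_CLIENT dependency, it can be placed on any number of hosts after the fact. A minimal sketch of doing that through Ambari's REST API (assuming a server at the default port with default admin credentials; the cluster and host names below are hypothetical):

import requests

AMBARI_URL = "http://ambari-server:8080"   # assumption: default Ambari port
AUTH = ("admin", "admin")                  # assumption: default credentials
HEADERS = {"X-Requested-By": "ambari"}     # required header for Ambari API writes
CLUSTER, HOST = "c1", "host1.example.com"  # hypothetical names

base = "%s/api/v1/clusters/%s/hosts/%s/host_components/NFS_GATEWAY" % (
    AMBARI_URL, CLUSTER, HOST)

# Register the component on the host, then drive it through
# INSTALLED to STARTED.
requests.post(base, auth=AUTH, headers=HEADERS)
for state in ("INSTALLED", "STARTED"):
    requests.put(base, auth=AUTH, headers=HEADERS,
                 json={"HostRoles": {"state": state}})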
http://git-wip-us.apache.org/repos/asf/ambari/blob/c217b8d8/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
new file mode 100644
index 0000000..e908ecf
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
@@ -0,0 +1,42 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.core.resources import Directory
+from utils import service
+from utils import hdfs_directory
+
+
+def nfsgateway(action=None, format=False):
+ import params
+
+ if action == "configure":
+ return
+ elif action == "start" or action == "stop":
+ Directory(params.hadoop_pid_dir_prefix,
+ mode=0755,
+ owner=params.hdfs_user,
+ group=params.user_group
+ )
+ service(
+ action=action,
+ name="nfs3",
+ user=params.hdfs_user,
+ create_pid_dir=True,
+ create_log_dir=True
+ )
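The service() helper above is what ultimately shells out to hadoop-daemon.sh. A rough sketch only of what the call amounts to for the NFS gateway (the real utils.service also creates the pid/log directories and guards the command with a not_if check; the exact command string is asserted in the unit tests further down):

import subprocess

def nfs3_daemon(action, hdfs_user="hdfs"):
    # Rough equivalent of service(action=..., name="nfs3", ...);
    # assumes ambari-sudo.sh is on PATH, as on an Ambari-managed host.
    cmd = ("ambari-sudo.sh su %s -l -s /bin/bash -c "
           "'/usr/lib/hadoop/sbin/hadoop-daemon.sh "
           "--config /etc/hadoop/conf %s nfs3'" % (hdfs_user, action))
    subprocess.check_call(cmd, shell=True)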
http://git-wip-us.apache.org/repos/asf/ambari/blob/c217b8d8/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
new file mode 100644
index 0000000..d599b8c
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
@@ -0,0 +1,131 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.security_commons import build_expectations, \
+ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
+ FILE_TYPE_XML
+from hdfs_nfsgateway import nfsgateway
+from hdfs import hdfs
+
+
+class NFSGateway(Script):
+
+ def get_stack_to_component(self):
+ return {"HDP": "hadoop-hdfs-nfs3"}
+
+ def install(self, env):
+ import params
+
+ env.set_params(params)
+
+ self.install_packages(env, params.exclude_packages)
+
+ def pre_rolling_restart(self, env):
+ # The NFS Gateway does not need any rolling restart logic.
+ pass
+
+ def start(self, env, rolling_restart=False):
+ import params
+ env.set_params(params)
+
+ self.configure(env)
+ nfsgateway(action="start")
+
+ def stop(self, env, rolling_restart=False):
+ import params
+ env.set_params(params)
+
+ nfsgateway(action="stop")
+
+ def configure(self, env):
+ import params
+
+ env.set_params(params)
+ hdfs()
+ nfsgateway(action="configure")
+
+ def status(self, env):
+ import status_params
+
+ env.set_params(status_params)
+
+ check_process_status(status_params.nfsgateway_pid_file)
+
+ def security_status(self, env):
+ import status_params
+
+ env.set_params(status_params)
+ props_value_check = {"hadoop.security.authentication": "kerberos",
+ "hadoop.security.authorization": "true"}
+ props_empty_check = ["hadoop.security.auth_to_local"]
+ props_read_check = None
+ core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
+ props_read_check)
+ props_value_check = None
+ props_empty_check = ['nfs.keytab.file',
+ 'nfs.kerberos.principal']
+ props_read_check = ['nfs.keytab.file']
+ hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
+ props_read_check)
+
+ hdfs_expectations = {}
+ hdfs_expectations.update(core_site_expectations)
+ hdfs_expectations.update(hdfs_site_expectations)
+
+ security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
+ {'core-site.xml': FILE_TYPE_XML,
+ 'hdfs-site.xml': FILE_TYPE_XML})
+ if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
+ security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
+ result_issues = validate_security_config_properties(security_params, hdfs_expectations)
+ if not result_issues: # If all validations passed successfully
+ try:
+ # Double check the dict before calling execute
+ if ('hdfs-site' not in security_params or
+ 'nfs.keytab.file' not in security_params['hdfs-site'] or
+ 'nfs.kerberos.principal' not in security_params['hdfs-site']):
+ self.put_structured_out({"securityState": "UNSECURED"})
+ self.put_structured_out(
+ {"securityIssuesFound": "Keytab file or principal are not set property."})
+ return
+
+ cached_kinit_executor(status_params.kinit_path_local,
+ status_params.hdfs_user,
+ security_params['hdfs-site']['nfs.keytab.file'],
+ security_params['hdfs-site'][
+ 'nfs.kerberos.principal'],
+ status_params.hostname,
+ status_params.tmp_dir)
+ self.put_structured_out({"securityState": "SECURED_KERBEROS"})
+ except Exception as e:
+ self.put_structured_out({"securityState": "ERROR"})
+ self.put_structured_out({"securityStateErrorInfo": str(e)})
+ else:
+ issues = []
+ for cf in result_issues:
+ issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
+ self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
+ self.put_structured_out({"securityState": "UNSECURED"})
+ else:
+ self.put_structured_out({"securityState": "UNSECURED"})
+
+if __name__ == "__main__":
+ NFSGateway().execute()
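The status() method above delegates to check_process_status with the new nfs3 pid file. A minimal sketch of the idea behind that check (the real helper raises ComponentIsNotRunning rather than returning a boolean):

import os

def nfs3_process_alive(pid_file="/var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid"):
    # Sketch only: a pid file must exist and its recorded process
    # must still be running.
    if not os.path.isfile(pid_file):
        return False
    with open(pid_file) as f:
        pid = int(f.read().strip())
    try:
        os.kill(pid, 0)  # signal 0 probes for existence without killing
    except OSError:
        return False
    return True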
http://git-wip-us.apache.org/repos/asf/ambari/blob/c217b8d8/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
index 6ae9b66..c7c5325 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
@@ -29,6 +29,7 @@ namenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-namenode.pid")
snamenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-secondarynamenode.pid")
journalnode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-journalnode.pid")
zkfc_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
+nfsgateway_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-nfs3.pid")
# Security related/required params
hostname = config['hostname']
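format() here is resource_management's interpolating formatter, which resolves {hdp_pid_dir} and {hdfs_user} from the surrounding params scope. With the default values the tests below exercise, the new entry resolves as plain str.format would:

# Illustration only, using the defaults from the tests below:
print("{hdp_pid_dir}/hadoop-{hdfs_user}-nfs3.pid".format(
    hdp_pid_dir="/var/run/hadoop/hdfs", hdfs_user="hdfs"))
# -> /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid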
http://git-wip-us.apache.org/repos/asf/ambari/blob/c217b8d8/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
new file mode 100644
index 0000000..4fdf740
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -0,0 +1,360 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import os
+from stacks.utils.RMFTestCase import *
+from mock.mock import MagicMock, patch
+
+
+class TestNFSGateway(RMFTestCase):
+ COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
+ STACK_VERSION = "2.0.6"
+ UPGRADE_STACK_VERSION = "2.2"
+
+ def test_configure_default(self):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "configure",
+ config_file = "default.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ self.assert_configure_default()
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "start",
+ config_file = "default.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ self.assert_configure_default()
+ self.assertResourceCalled('Directory', '/var/run/hadoop',
+ owner = 'hdfs',
+ group = 'hadoop',
+ mode = 0755
+ )
+ self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid',
+ action = ['delete'],
+ not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid` >/dev/null 2>&1',
+ )
+ self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start nfs3'",
+ environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
+ not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid` >/dev/null 2>&1',
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "stop",
+ config_file = "default.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ self.assertResourceCalled('Directory', '/var/run/hadoop',
+ owner = 'hdfs',
+ group = 'hadoop',
+ mode = 0755
+ )
+ self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid',
+ action = ['delete'],
+ not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid` >/dev/null 2>&1',
+ )
+ self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop nfs3'",
+ environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
+ not_if = None,
+ )
+ self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid',
+ action = ['delete'],
+ )
+ self.assertNoMoreResources()
+
+ def test_configure_secured(self):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "configure",
+ config_file = "secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ self.assert_configure_secured()
+ self.assertNoMoreResources()
+
+ def test_start_secured(self):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "start",
+ config_file = "secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ self.assert_configure_secured()
+ self.assertResourceCalled('Directory', '/var/run/hadoop',
+ owner = 'hdfs',
+ group = 'hadoop',
+ mode = 0755
+ )
+ self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid',
+ action = ['delete'],
+ not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid` >/dev/null 2>&1',
+ )
+ self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start nfs3'",
+ environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
+ not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid` >/dev/null 2>&1',
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_secured(self):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "stop",
+ config_file = "secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ self.assertResourceCalled('Directory', '/var/run/hadoop',
+ owner = 'hdfs',
+ group = 'hadoop',
+ mode = 0755
+ )
+ self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid',
+ action = ['delete'],
+ not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid` >/dev/null 2>&1',
+ )
+ self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop nfs3'",
+ environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
+ not_if = None,
+ )
+ self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-nfs3.pid',
+ action = ['delete'],
+ )
+ self.assertNoMoreResources()
+
+ def assert_configure_default(self):
+ self.assertResourceCalled('Directory', '/etc/security/limits.d',
+ owner = 'root',
+ group = 'root',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+ content = Template('hdfs.conf.j2'),
+ owner = 'root',
+ group = 'root',
+ mode = 0644,
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hdfs',
+ group = 'hadoop',
+ conf_dir = '/etc/hadoop/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'],
+ configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+ )
+ self.assertResourceCalled('XmlConfig', 'core-site.xml',
+ owner = 'hdfs',
+ group = 'hadoop',
+ conf_dir = '/etc/hadoop/conf',
+ configurations = self.getConfig()['configurations']['core-site'],
+ configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+ mode = 0644
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+ content = Template('slaves.j2'),
+ owner = 'hdfs',
+ )
+
+ def assert_configure_secured(self):
+ self.assertResourceCalled('Directory', '/etc/security/limits.d',
+ owner = 'root',
+ group = 'root',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+ content = Template('hdfs.conf.j2'),
+ owner = 'root',
+ group = 'root',
+ mode = 0644,
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hdfs',
+ group = 'hadoop',
+ conf_dir = '/etc/hadoop/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'],
+ configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+ )
+ self.assertResourceCalled('XmlConfig', 'core-site.xml',
+ owner = 'hdfs',
+ group = 'hadoop',
+ conf_dir = '/etc/hadoop/conf',
+ configurations = self.getConfig()['configurations']['core-site'],
+ configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+ mode = 0644
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+ content = Template('slaves.j2'),
+ owner = 'root',
+ )
+
+
+ @patch("resource_management.libraries.functions.security_commons.build_expectations")
+ @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
+ @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
+ @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
+ @patch("resource_management.libraries.script.Script.put_structured_out")
+ def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
+ # Test that the function works when called with correct parameters
+ security_params = {
+ 'core-site': {
+ 'hadoop.security.authentication': 'kerberos'
+ },
+ 'hdfs-site': {
+ 'nfs.keytab.file': 'path/to/nfsgateway/keytab/file',
+ 'nfs.kerberos.principal': 'nfs_principal'
+ }
+ }
+
+ props_value_check = None
+ props_empty_check = ['nfs.keytab.file',
+ 'nfs.kerberos.principal']
+ props_read_check = ['nfs.keytab.file']
+
+ result_issues = []
+
+ get_params_mock.return_value = security_params
+ validate_security_config_mock.return_value = result_issues
+
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "security_status",
+ config_file="secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+
+ build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
+ put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
+ cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
+ self.config_dict['configurations']['hadoop-env']['hdfs_user'],
+ security_params['hdfs-site']['nfs.keytab.file'],
+ security_params['hdfs-site']['nfs.kerberos.principal'],
+ self.config_dict['hostname'],
+ '/tmp')
+
+ # Testing when hadoop.security.authentication is simple
+ security_params['core-site']['hadoop.security.authentication'] = 'simple'
+
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "security_status",
+ config_file="secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+
+ put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
+ security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
+
+ # Testing that the exception thrown by cached_kinit_executor is caught
+ cached_kinit_executor_mock.reset_mock()
+ cached_kinit_executor_mock.side_effect = Exception("Invalid command")
+
+ try:
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "security_status",
+ config_file="secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ except:
+ self.assertTrue(True)
+
+ # Testing with a security_params which doesn't contain hdfs-site
+ empty_security_params = {
+ 'core-site': {
+ 'hadoop.security.authentication': 'kerberos'
+ }
+ }
+ cached_kinit_executor_mock.reset_mock()
+ get_params_mock.reset_mock()
+ put_structured_out_mock.reset_mock()
+ get_params_mock.return_value = empty_security_params
+
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "security_status",
+ config_file="secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+
+ put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal is not set properly."})
+
+ # Testing with a non-empty result_issues
+ result_issues_with_params = {
+ 'hdfs-site': "Something bad happened"
+ }
+
+ validate_security_config_mock.reset_mock()
+ get_params_mock.reset_mock()
+ validate_security_config_mock.return_value = result_issues_with_params
+ get_params_mock.return_value = security_params
+
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
+ classname = "NFSGateway",
+ command = "security_status",
+ config_file="secured.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
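These tests follow the usual RMFTestCase pattern: executeScript records every resource the script declares, and the assertions then consume them in declaration order. A stripped-down sketch of that idea (not the real RMFTestCase):

class MiniRMF(object):
    def __init__(self, recorded):
        # resources captured during script execution, in declaration order
        self.recorded = list(recorded)

    def assertResourceCalled(self, rtype, name, **kwargs):
        # pop the next declared resource and compare it to the expectation
        actual = self.recorded.pop(0)
        assert actual == (rtype, name, kwargs), "unexpected resource: %r" % (actual,)

    def assertNoMoreResources(self):
        # fail if the script declared anything the test did not expect
        assert not self.recorded, "unconsumed resources: %r" % (self.recorded,)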
http://git-wip-us.apache.org/repos/asf/ambari/blob/c217b8d8/ambari-web/app/data/HDP2/config_mapping.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/config_mapping.js b/ambari-web/app/data/HDP2/config_mapping.js
index 1104ceb..2eab405d 100644
--- a/ambari-web/app/data/HDP2/config_mapping.js
+++ b/ambari-web/app/data/HDP2/config_mapping.js
@@ -83,7 +83,22 @@ var configs = [
"filename": "core-site.xml",
"isOverridable": true
},
-
+ {
+ "name": "hadoop.proxyuser.<foreignKey[0]>.groups",
+ "templateName": [],
+ "foreignKey": ["hdfs_user"],
+ "value": "*",
+ "filename": "core-site.xml",
+ "isOverridable": true
+ },
+ {
+ "name": "hadoop.proxyuser.<foreignKey[0]>.hosts",
+ "templateName": [],
+ "foreignKey": ["hdfs_user"],
+ "value": "*",
+ "filename": "core-site.xml",
+ "isOverridable": true
+ },
/**********************************Oozie******************************/
{
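The two config_mapping.js entries above exist because the NFS gateway proxies file system operations on behalf of the users accessing the mount, which requires proxyuser privileges for the user running the gateway daemon. Assuming the default hdfs_user of "hdfs", the <foreignKey[0]> template renders into these core-site.xml defaults:

hadoop.proxyuser.hdfs.groups = *
hadoop.proxyuser.hdfs.hosts = *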