You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ab...@apache.org on 2017/07/14 12:38:08 UTC
[18/31] ambari git commit: AMBARI-21432 - Allow Services To Be
Stopped During an EU Between Stack Vendors (jonathanhurley)
AMBARI-21432 - Allow Services To Be Stopped During an EU Between Stack Vendors (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/880853a6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/880853a6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/880853a6
Branch: refs/heads/branch-feature-logsearch-ui
Commit: 880853a665dc07c68ec5f05975e01eba7bb561ee
Parents: 51e62ad
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Sun Jul 9 18:18:22 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Jul 11 10:31:12 2017 -0400
----------------------------------------------------------------------
.../libraries/functions/conf_select.py | 56 +++++---------------
.../2.0.6/HBASE/test_phoenix_queryserver.py | 23 --------
.../stacks/2.0.6/YARN/test_historyserver.py | 21 +-------
3 files changed, 15 insertions(+), 85 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/880853a6/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 3e01cf6..4f11633 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -399,7 +399,6 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
stack_root = Script.get_stack_root()
stack_version = Script.get_stack_version()
version = None
- allow_setting_conf_select_symlink = False
if not Script.in_stack_upgrade():
# During normal operation, the HDP stack must be 2.3 or higher
@@ -413,27 +412,10 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
if not os.path.islink(hadoop_conf_dir) and stack_name and version:
version = str(version)
- allow_setting_conf_select_symlink = True
else:
- # During an upgrade/downgrade, which can be a Rolling or Express Upgrade, need to calculate it based on the version
- '''
- Whenever upgrading to HDP 2.2, or downgrading back to 2.2, need to use /etc/hadoop/conf
- Whenever upgrading to HDP 2.3, or downgrading back to 2.3, need to use a versioned hadoop conf dir
-
- Type__|_Source_|_Target_|_Direction_____________|_Comment_____________________________________________________________
- Normal| | 2.2 | | Use /etc/hadoop/conf
- Normal| | 2.3 | | Use /etc/hadoop/conf, which should be a symlink to <stack-root>/current/hadoop-client/conf
- EU | 2.1 | 2.3 | Upgrade | Use versioned <stack-root>/current/hadoop-client/conf
- | | | No Downgrade Allowed | Invalid
- EU/RU | 2.2 | 2.2.* | Any | Use <stack-root>/current/hadoop-client/conf
- EU/RU | 2.2 | 2.3 | Upgrade | Use <stack-root>/$version/hadoop/conf, which should be a symlink destination
- | | | Downgrade | Use <stack-root>/current/hadoop-client/conf
- EU/RU | 2.3 | 2.3.* | Any | Use <stack-root>/$version/hadoop/conf, which should be a symlink destination
- '''
-
# The "stack_version" is the desired stack, e.g., 2.2 or 2.3
# In an RU, it is always the desired stack, and doesn't change even during the Downgrade!
- # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is
+ # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is
# rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf
if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
@@ -442,13 +424,16 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
# is the version upgrading/downgrading to.
stack_info = stack_select._get_upgrade_stack()
- if stack_info is not None:
- stack_name = stack_info[0]
- version = stack_info[1]
- else:
- raise Fail("Unable to get parameter 'version'")
-
- Logger.info("In the middle of a stack upgrade/downgrade for Stack {0} and destination version {1}, determining which hadoop conf dir to use.".format(stack_name, version))
+ if stack_info is None:
+ raise Fail("Unable to retrieve the upgrade/downgrade stack information from the request")
+
+ stack_name = stack_info[0]
+ version = stack_info[1]
+
+ Logger.info(
+ "An upgrade/downgrade for {0}-{1} is in progress, determining which hadoop conf dir to use.".format(
+ stack_name, version))
+
# This is the version either upgrading or downgrading to.
if version and check_stack_feature(StackFeature.CONFIG_VERSIONING, version):
# Determine if <stack-selector-tool> has been run and if not, then use the current
@@ -465,21 +450,6 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
hadoop_conf_dir = os.path.join(stack_root, version, "hadoop", "conf")
Logger.info("Hadoop conf dir: {0}".format(hadoop_conf_dir))
- allow_setting_conf_select_symlink = True
-
- if allow_setting_conf_select_symlink:
- # If not in the middle of an upgrade and on HDP 2.3 or higher, or if
- # upgrading stack to version 2.3.0.0 or higher (which may be upgrade or downgrade), then consider setting the
- # symlink for /etc/hadoop/conf.
- # If a host does not have any HDFS or YARN components (e.g., only ZK), then it will not contain /etc/hadoop/conf
- # Therefore, any calls to <conf-selector-tool> will fail.
- # For that reason, if the hadoop conf directory exists, then make sure it is set.
- if os.path.exists(hadoop_conf_dir):
- conf_selector_name = stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
- Logger.info("The hadoop conf dir {0} exists, will call {1} on it for version {2}".format(
- hadoop_conf_dir, conf_selector_name, version))
- select(stack_name, "hadoop", version)
-
Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
return hadoop_conf_dir
@@ -587,7 +557,7 @@ def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_l
# <stack-root>/current/[component] is already set to the correct version, e.g., <stack-root>/[version]/[component]
-
+
select(stack_name, package, version, ignore_errors = True)
# Symlink /etc/[component]/conf to /etc/[component]/conf.backup
@@ -702,4 +672,4 @@ def _get_backup_conf_directory(old_conf):
"""
old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
backup_dir = os.path.join(old_parent, "conf.backup")
- return backup_dir
+ return backup_dir
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/880853a6/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 60022e1..1b324d4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -76,8 +76,6 @@ class TestPhoenixQueryServer(RMFTestCase):
call_mocks = [(0, None, None)]
)
- self.assert_call_to_get_hadoop_conf_dir()
-
self.assertResourceCalled('Execute',
'/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
environment = {'JAVA_HOME':'/usr/jdk64/jdk1.8.0_40',
@@ -134,8 +132,6 @@ class TestPhoenixQueryServer(RMFTestCase):
call_mocks = [(0, None, None)]
)
- self.assert_call_to_get_hadoop_conf_dir()
-
self.assertResourceCalled('Execute',
'/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
environment = {'JAVA_HOME':'/usr/jdk64/jdk1.8.0_40',
@@ -217,18 +213,7 @@ class TestPhoenixQueryServer(RMFTestCase):
self.assertNoMoreResources()
- def assert_call_to_get_hadoop_conf_dir(self):
- # From call to conf_select.get_hadoop_conf_dir()
- self.assertResourceCalled("Execute", ("cp", "-R", "-p", "/etc/hadoop/conf", "/etc/hadoop/conf.backup"),
- not_if = "test -e /etc/hadoop/conf.backup",
- sudo = True)
- self.assertResourceCalled("Directory", "/etc/hadoop/conf",
- action = ["delete"])
- self.assertResourceCalled("Link", "/etc/hadoop/conf", to="/etc/hadoop/conf.backup")
-
def assert_configure_default(self):
- self.assert_call_to_get_hadoop_conf_dir()
-
self.assertResourceCalled('Directory', '/etc/hbase',
mode = 0755
)
@@ -330,8 +315,6 @@ class TestPhoenixQueryServer(RMFTestCase):
)
def assert_configure_secured(self):
- self.assert_call_to_get_hadoop_conf_dir()
-
self.assertResourceCalled('Directory', '/etc/hbase',
mode = 0755
)
@@ -459,10 +442,4 @@ class TestPhoenixQueryServer(RMFTestCase):
cd_access = 'a',
)
self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-server', '2.3.0.0-1234'), sudo=True)
-
- self.assertResourceCalled("Execute", ("cp", "-R", "-p", "/etc/hadoop/conf", "/etc/hadoop/conf.backup"),
- not_if = "test -e /etc/hadoop/conf.backup",
- sudo = True)
- self.assertResourceCalled("Directory", "/etc/hadoop/conf", action = ["delete"])
- self.assertResourceCalled("Link", "/etc/hadoop/conf", to="/etc/hadoop/conf.backup")
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/880853a6/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 53d16fd..b29cfb5 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -749,15 +749,6 @@ class TestHistoryServer(RMFTestCase):
group = 'hadoop',
)
- def assert_call_to_get_hadoop_conf_dir(self):
- # From call to conf_select.get_hadoop_conf_dir()
- self.assertResourceCalled("Execute", ("cp", "-R", "-p", "/etc/hadoop/conf", "/etc/hadoop/conf.backup"),
- not_if = "test -e /etc/hadoop/conf.backup",
- sudo = True)
- self.assertResourceCalled("Directory", "/etc/hadoop/conf",
- action = ["delete"])
- self.assertResourceCalled("Link", "/etc/hadoop/conf", to="/etc/hadoop/conf.backup")
-
@patch.object(functions, "get_stack_version", new = MagicMock(return_value="2.3.0.0-1234"))
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock):
@@ -783,8 +774,6 @@ class TestHistoryServer(RMFTestCase):
self.assertTrue(call("slider", "hadoop", "hdfs", skip=False) in copy_to_hdfs_mock.call_args_list)
# From call to conf_select.get_hadoop_conf_dir()
- self.assert_call_to_get_hadoop_conf_dir()
- self.assert_call_to_get_hadoop_conf_dir()
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
@@ -800,11 +789,5 @@ class TestHistoryServer(RMFTestCase):
self.assertNoMoreResources()
- self.assertEquals(5, mocks_dict['call'].call_count)
- self.assertEquals(5, mocks_dict['checked_call'].call_count)
- self.assertEquals(
- ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
- mocks_dict['checked_call'].call_args_list[0][0][0])
- self.assertEquals(
- ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
- mocks_dict['call'].call_args_list[0][0][0])
+ self.assertEquals(1, mocks_dict['call'].call_count)
+ self.assertEquals(1, mocks_dict['checked_call'].call_count)