You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ds...@apache.org on 2016/04/27 15:32:46 UTC
ambari git commit: AMBARI-15790 Clean up stack scripts that refer to
dfs.nameservices to use dfs.internal.nameservices as first option (-ns option
to haadmin command added) (dsen)
Repository: ambari
Updated Branches:
refs/heads/trunk e8a79db74 -> fbd53ed75
AMBARI-15790 Clean up stack scripts that refer to dfs.nameservices to use dfs.internal.nameservices as first option (-ns option to haadmin command added) (dsen)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fbd53ed7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fbd53ed7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fbd53ed7
Branch: refs/heads/trunk
Commit: fbd53ed757e63b6370e7f83c232c09912a93653e
Parents: e8a79db
Author: Dmytro Sen <ds...@apache.org>
Authored: Wed Apr 27 16:32:36 2016 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Wed Apr 27 16:32:36 2016 +0300
----------------------------------------------------------------------
.../libraries/functions/namenode_ha_utils.py | 2 +-
.../2.1.0.2.0/package/scripts/hdfs_namenode.py | 6 +-
.../package/scripts/namenode_ha_state.py | 2 +-
.../HDFS/2.1.0.2.0/package/scripts/utils.py | 6 +-
.../HDFS/package/scripts/hdfs_namenode.py | 2 +-
.../python/stacks/2.0.6/HDFS/test_namenode.py | 74 ++++++++++----------
.../python/stacks/2.0.6/configs/ha_default.json | 5 +-
7 files changed, 49 insertions(+), 48 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbd53ed7/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
index 7187e04..ee16c9b 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
@@ -100,7 +100,7 @@ def get_namenode_states_noretries(hdfs_site, security_enabled, run_user):
state = get_value_from_jmx(jmx_uri, 'tag.HAState', security_enabled, run_user, is_https_enabled)
# If JMX parsing failed
if not state:
- check_service_cmd = "hdfs haadmin -getServiceState {0}".format(nn_unique_id)
+ check_service_cmd = "hdfs haadmin -ns {0} -getServiceState {1}".format(get_nameservice(hdfs_site), nn_unique_id)
code, out = shell.call(check_service_cmd, logoutput=True, user=run_user)
if code == 0 and out:
if HDFS_NN_STATE_STANDBY in out:
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbd53ed7/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index 69235d0..357eb71 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -160,7 +160,7 @@ def namenode(action=None, hdfs_binary=None, do_format=True, upgrade_type=None,
user = params.hdfs_user)
if params.dfs_ha_enabled:
- is_active_namenode_cmd = as_user(format("{hdfs_binary} --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
+ is_active_namenode_cmd = as_user(format("{hdfs_binary} --config {hadoop_conf_dir} haadmin -ns {dfs_ha_nameservices} -getServiceState {namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
else:
is_active_namenode_cmd = True
@@ -516,8 +516,8 @@ def is_active_namenode(hdfs_binary):
import params
if params.dfs_ha_enabled:
- is_active_this_namenode_cmd = as_user(format("{hdfs_binary} --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
- is_active_other_namenode_cmd = as_user(format("{hdfs_binary} --config {hadoop_conf_dir} haadmin -getServiceState {other_namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
+ is_active_this_namenode_cmd = as_user(format("{hdfs_binary} --config {hadoop_conf_dir} haadmin -ns {dfs_ha_nameservices} -getServiceState {namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
+ is_active_other_namenode_cmd = as_user(format("{hdfs_binary} --config {hadoop_conf_dir} haadmin -ns {dfs_ha_nameservices} -getServiceState {other_namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
for i in range(0, 5):
code, out = shell.call(is_active_this_namenode_cmd) # If active NN, code will be 0
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbd53ed7/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode_ha_state.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode_ha_state.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode_ha_state.py
index ff12edd..259af2e 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode_ha_state.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode_ha_state.py
@@ -89,7 +89,7 @@ class NamenodeHAState:
# If JMX parsing failed
if not state:
run_user = default("/configurations/hadoop-env/hdfs_user", "hdfs")
- check_service_cmd = "hdfs haadmin -getServiceState {0}".format(nn_unique_id)
+ check_service_cmd = "hdfs haadmin -ns {dfs_ha_nameservices} -getServiceState {0}".format(nn_unique_id)
code, out = shell.call(check_service_cmd, logoutput=True, user=run_user)
if code == 0 and out:
if NAMENODE_STATE.STANDBY in out:
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbd53ed7/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
index 8501a8e..ceee738 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
@@ -98,8 +98,8 @@ def initiate_safe_zkfc_failover():
# Failover if this NameNode is active and other NameNode is up and in standby (i.e. ready to become active on failover)
Logger.info(format("NameNode {namenode_id} is active and NameNode {other_namenode_id} is in standby"))
- failover_command = format("hdfs haadmin -failover {namenode_id} {other_namenode_id}")
- check_standby_cmd = format("hdfs haadmin -getServiceState {namenode_id} | grep standby")
+ failover_command = format("hdfs haadmin -ns {dfs_ha_nameservices} -failover {namenode_id} {other_namenode_id}")
+ check_standby_cmd = format("hdfs haadmin -ns {dfs_ha_nameservices} -getServiceState {namenode_id} | grep standby")
msg = "Rolling Upgrade - Initiating a ZKFC failover on active NameNode host {0}.".format(params.hostname)
Logger.info(msg)
@@ -383,4 +383,4 @@ def get_dfsadmin_base_command(hdfs_binary, use_specific_namenode = False):
dfsadmin_base_command = format("{hdfs_binary} dfsadmin -fs hdfs://{params.namenode_rpc}")
else:
dfsadmin_base_command = format("{hdfs_binary} dfsadmin -fs {params.namenode_address}")
- return dfsadmin_base_command
\ No newline at end of file
+ return dfsadmin_base_command
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbd53ed7/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py
index 6b4d0f6..35e16d5 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/hdfs_namenode.py
@@ -45,7 +45,7 @@ def namenode(action=None, do_format=True):
create_log_dir=True
)
if params.dfs_ha_enabled:
- dfs_check_nn_status_cmd = format("su -s /bin/bash - {hdfs_user} -c 'export PATH=$PATH:{hadoop_bin_dir} ; hdfs --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active > /dev/null'")
+ dfs_check_nn_status_cmd = format("su -s /bin/bash - {hdfs_user} -c 'export PATH=$PATH:{hadoop_bin_dir} ; hdfs --config {hadoop_conf_dir} haadmin -ns {dfs_ha_nameservices} -getServiceState {namenode_id} | grep active > /dev/null'")
else:
dfs_check_nn_status_cmd = None
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbd53ed7/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 49935a1..f56a0d2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -453,7 +453,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -471,7 +471,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -489,7 +489,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -555,7 +555,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -573,7 +573,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -591,7 +591,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -607,11 +607,11 @@ class TestNamenode(RMFTestCase):
self.assertTrue(call_mocks.called)
self.assertEqual(5, call_mocks.call_count)
calls = [
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'")]
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'"),
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'"),
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'"),
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'"),
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'")]
call_mocks.assert_has_calls(calls)
def test_start_ha_secured(self):
@@ -663,7 +663,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -681,7 +681,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -699,7 +699,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -771,7 +771,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -789,7 +789,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -807,7 +807,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -881,7 +881,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -899,7 +899,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -917,7 +917,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -934,7 +934,7 @@ class TestNamenode(RMFTestCase):
self.assertEqual(2, call_mocks.call_count)
calls = [
call('hdfs namenode -bootstrapStandby -nonInteractive', logoutput=False, user=u'hdfs'),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'")]
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'")]
call_mocks.assert_has_calls(calls, any_order=False)
# tests namenode start command when NameNode HA is enabled, and
@@ -999,7 +999,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -1017,7 +1017,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -1035,7 +1035,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -1051,7 +1051,7 @@ class TestNamenode(RMFTestCase):
self.assertTrue(call_mocks.called)
self.assertEqual(3, call_mocks.call_count)
calls = [
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'"),
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs'),
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs')]
call_mocks.assert_has_calls(calls, any_order=True)
@@ -1115,7 +1115,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/tmp',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -1133,7 +1133,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -1151,7 +1151,7 @@ class TestNamenode(RMFTestCase):
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
keytab = UnknownConfigurationMock(),
hadoop_bin_dir = '/usr/bin',
default_fs = 'hdfs://ns1',
@@ -1167,7 +1167,7 @@ class TestNamenode(RMFTestCase):
self.assertTrue(call_mocks.called)
self.assertEqual(3, call_mocks.call_count)
calls = [
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
+ call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'"),
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs'),
call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs')]
call_mocks.assert_has_calls(calls, any_order=True)
@@ -1586,7 +1586,7 @@ class TestNamenode(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, None, ''), (0, None)],
mocks_dict=mocks_dict)
-
+
calls = mocks_dict['call'].call_args_list
self.assertTrue(len(calls) >= 1)
self.assertTrue(calls[0].startsWith("conf-select create-conf-dir --package hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))
@@ -1710,16 +1710,16 @@ class TestNamenode(RMFTestCase):
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
-
+
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
logoutput = True, user = 'hdfs')
-
+
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade prepare',
logoutput = True, user = 'hdfs')
self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
logoutput = True, user = 'hdfs')
-
+
self.assertNoMoreResources()
def test_prepare_rolling_upgrade__upgrade(self):
@@ -1746,7 +1746,7 @@ class TestNamenode(RMFTestCase):
logoutput = True, user = 'hdfs')
self.assertNoMoreResources()
-
+
@patch.object(shell, "call")
@@ -1765,11 +1765,11 @@ class TestNamenode(RMFTestCase):
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
-
- self.assertResourceCalled('Execute',
+
+ self.assertResourceCalled('Execute',
'/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
logoutput = True, user = 'hdfs')
-
+
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbd53ed7/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
index 6b4873a..e605c05 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
@@ -169,8 +169,9 @@
"dfs.namenode.stale.datanode.interval": "30000",
"dfs.datanode.ipc.address": "0.0.0.0:8010",
"dfs.namenode.name.dir": "/hadoop/hdfs/namenode",
- "dfs.nameservices": "ns1",
- "dfs.datanode.data.dir": "/hadoop/hdfs/data",
+ "dfs.internal.nameservices": "ns1",
+ "dfs.nameservices": "ns1,ns2",
+ "dfs.datanode.data.dir": "/hadoop/hdfs/data",
"dfs.namenode.https-address.ns1.nn2": "c6402.ambari.apache.org:50470",
"dfs.webhdfs.enabled": "true",
"dfs.namenode.https-address.ns1.nn1": "c6401.ambari.apache.org:50470",