You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2015/05/08 22:38:52 UTC
[3/5] ambari git commit: AMBARI-11034. Implement HdfsResources and
its usage (aonishuk)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index 583e6e9..1e75eb0 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -61,37 +61,35 @@ def yarn(name = None):
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def yarn(name = None):
import params
- if name in ["nodemanager","historyserver"]:
+ if name == "historyserver":
if params.yarn_log_aggregation_enabled:
- params.HdfsDirectory(params.yarn_nm_app_log_dir,
- action="create_delayed",
+ params.HdfsResource(params.yarn_nm_app_log_dir,
+ action="create_on_execute",
+ type="directory",
owner=params.yarn_user,
group=params.user_group,
mode=0777,
recursive_chmod=True
)
- params.HdfsDirectory("/mapred",
- action="create_delayed",
+ params.HdfsResource("/mapred",
+ type="directory",
+ action="create_on_execute",
owner=params.mapred_user
)
- params.HdfsDirectory("/mapred/system",
- action="create_delayed",
+ params.HdfsResource("/mapred/system",
+ type="directory",
+ action="create_on_execute",
owner=params.hdfs_user
)
- params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
- action="create_delayed",
+ params.HdfsResource(params.mapreduce_jobhistory_done_dir,
+ type="directory",
+ action="create_on_execute",
owner=params.mapred_user,
group=params.user_group,
+ change_permissions_for_parents=True,
mode=0777
)
-
- params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
- action="create_delayed",
- owner=params.mapred_user,
- group=params.user_group,
- mode=01777
- )
- params.HdfsDirectory(None, action="create")
+ params.HdfsResource(None, action="execute")
if name == "nodemanager":
Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
@@ -178,12 +176,14 @@ def yarn(name = None):
group=params.user_group
)
if not is_empty(params.node_label_enable) and params.node_label_enable or is_empty(params.node_label_enable) and params.node_labels_dir:
- params.HdfsDirectory(params.node_labels_dir,
- action="create",
+ params.HdfsResource(params.node_labels_dir,
+ type="directory",
+ action="create_on_execute",
owner=params.yarn_user,
group=params.user_group,
mode=0700
)
+ params.HdfsResource(None, action="execute")
elif name == 'apptimelineserver':
Directory(params.ats_leveldb_dir,
owner=params.yarn_user,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index b56d15a..cc404eb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -280,36 +280,42 @@ class TestHBaseMaster(RMFTestCase):
owner='hbase',
content='log4jproperties\nline2'
)
- self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- mode = 0711,
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- bin_dir = '/usr/bin',
- action = ['create'],
- )
+
+ self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hbase',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ )
+ self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+ security_enabled = False,
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ keytab = UnknownConfigurationMock(),
+
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hbase',
+ hadoop_bin_dir = '/usr/bin',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0711,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
def assert_configure_secured(self):
self.assertResourceCalled('Directory', '/etc/hbase',
@@ -401,36 +407,41 @@ class TestHBaseMaster(RMFTestCase):
owner='hbase',
content='log4jproperties\nline2'
)
- self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0711,
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- bin_dir = '/usr/bin',
- action = ['create'],
- )
+ self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hbase',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ )
+ self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+ security_enabled = True,
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hbase',
+ hadoop_bin_dir = '/usr/bin',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0711,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
def test_start_default_22(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
@@ -528,35 +539,38 @@ class TestHBaseMaster(RMFTestCase):
owner='hbase',
content='log4jproperties\nline2')
- self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/usr/hdp/current/hadoop-client/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- owner = 'hbase',
- bin_dir = '/usr/hdp/current/hadoop-client/bin',
- action = ['create_delayed'])
-
- self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/usr/hdp/current/hadoop-client/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- mode = 0711,
- owner = 'hbase',
- bin_dir = '/usr/hdp/current/hadoop-client/bin',
- action = ['create_delayed'])
-
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/usr/hdp/current/hadoop-client/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- bin_dir = '/usr/hdp/current/hadoop-client/bin',
- action = ['create'])
+ self.assertResourceCalled('HdfsResource', 'hdfs://nn1/apps/hbase/data',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hbase',
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ )
+ self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hbase',
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0711,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ )
self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-master/conf start master',
not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index ea8d9d4..3175a01 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -207,36 +207,6 @@ class TestHbaseRegionServer(RMFTestCase):
owner='hbase',
content='log4jproperties\nline2'
)
- self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0711,
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- bin_dir = '/usr/bin',
- action = ['create'],
- )
def assert_configure_secured(self):
self.assertResourceCalled('Directory', '/etc/hbase',
@@ -328,36 +298,6 @@ class TestHbaseRegionServer(RMFTestCase):
owner='hbase',
content='log4jproperties\nline2'
)
- self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0711,
- owner = 'hbase',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- bin_dir = '/usr/bin',
- action = ['create'],
- )
def test_start_default_22(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
@@ -453,37 +393,6 @@ class TestHbaseRegionServer(RMFTestCase):
group='hadoop',
owner='hbase',
content='log4jproperties\nline2')
-
- self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/usr/hdp/current/hadoop-client/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- owner = 'hbase',
- bin_dir = '/usr/hdp/current/hadoop-client/bin',
- action = ['create_delayed'])
-
- self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/usr/hdp/current/hadoop-client/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- mode = 0711,
- owner = 'hbase',
- bin_dir = '/usr/hdp/current/hadoop-client/bin',
- action = ['create_delayed'])
-
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/usr/hdp/current/hadoop-client/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = "/usr/bin/kinit",
- bin_dir = '/usr/hdp/current/hadoop-client/bin',
- action = ['create'])
-
self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-regionserver/conf start regionserver',
not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
user = 'hbase')
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 8b7846c..9b7a91f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -380,6 +380,9 @@ class TestDatanode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
owner = 'hdfs',
group = 'hadoop',
@@ -431,6 +434,9 @@ class TestDatanode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'root',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index 9975b36..d05e2dc 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -209,6 +209,9 @@ class TestJournalnode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
def assert_configure_secured(self):
self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -247,6 +250,9 @@ class TestJournalnode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'root',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
@patch('time.sleep')
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 2e7af32..488e1a2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -100,38 +100,40 @@ class TestNamenode(RMFTestCase):
user = 'hdfs',
try_sleep = 10,
)
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = None,
- )
+ self.assertResourceCalled('HdfsResource', '/tmp',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0770,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ only_if = None,
+ keytab = UnknownConfigurationMock(),
+ hadoop_bin_dir = '/usr/bin',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
self.assertNoMoreResources()
pass
@@ -205,38 +207,40 @@ class TestNamenode(RMFTestCase):
user = 'hdfs',
try_sleep = 10,
)
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = None,
- )
+ self.assertResourceCalled('HdfsResource', '/tmp',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0770,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ only_if = None,
+ keytab = UnknownConfigurationMock(),
+ hadoop_bin_dir = '/usr/bin',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
self.assertNoMoreResources()
def test_stop_default(self):
@@ -332,38 +336,40 @@ class TestNamenode(RMFTestCase):
user = 'hdfs',
try_sleep = 10,
)
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = None,
- )
+ self.assertResourceCalled('HdfsResource', '/tmp',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0770,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = True,
+ only_if = None,
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ hadoop_bin_dir = '/usr/bin',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
self.assertNoMoreResources()
def test_stop_secured(self):
@@ -429,37 +435,39 @@ class TestNamenode(RMFTestCase):
user = 'hdfs',
try_sleep = 10,
)
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
+ self.assertResourceCalled('HdfsResource', '/tmp',
security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0770,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ keytab = UnknownConfigurationMock(),
+ hadoop_bin_dir = '/usr/bin',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -508,37 +516,39 @@ class TestNamenode(RMFTestCase):
user = 'hdfs',
try_sleep = 10,
)
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
+ self.assertResourceCalled('HdfsResource', '/tmp',
security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0770,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = True,
only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ hadoop_bin_dir = '/usr/bin',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -596,38 +606,40 @@ class TestNamenode(RMFTestCase):
user = 'hdfs',
try_sleep = 10,
)
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- )
+ self.assertResourceCalled('HdfsResource', '/tmp',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0770,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+ keytab = UnknownConfigurationMock(),
+ hadoop_bin_dir = '/usr/bin',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
self.assertNoMoreResources()
# tests namenode start command when NameNode HA is enabled, and
@@ -681,38 +693,40 @@ class TestNamenode(RMFTestCase):
user = 'hdfs',
try_sleep = 10,
)
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- )
+ self.assertResourceCalled('HdfsResource', '/tmp',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hdfs',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0770,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+ keytab = UnknownConfigurationMock(),
+ hadoop_bin_dir = '/usr/bin',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
self.assertNoMoreResources()
def test_decommission_default(self):
@@ -829,6 +843,9 @@ class TestNamenode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
owner = 'hdfs',
group = 'hadoop',
@@ -868,6 +885,9 @@ class TestNamenode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'root',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index d683198..9d6d165 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -221,6 +221,9 @@ class TestNFSGateway(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
def assert_configure_secured(self):
self.assertResourceCalled('Directory', '/etc/security/limits.d',
@@ -253,6 +256,9 @@ class TestNFSGateway(RMFTestCase):
content = Template('slaves.j2'),
owner = 'root',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
@patch("resource_management.libraries.functions.security_commons.build_expectations")
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index 3c098b6..36abd40 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -59,36 +59,45 @@ class TestServiceCheck(RMFTestCase):
bin_dir = '/usr/bin',
user = 'hdfs',
)
- self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
- conf_dir = '/etc/hadoop/conf',
- logoutput = True,
- not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
- try_sleep = 3,
- tries = 5,
- bin_dir = '/usr/bin',
+ self.assertResourceCalled('HdfsResource', '/tmp',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
+ action = ['create_on_execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ mode = 0777,
)
- self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
- conf_dir = '/etc/hadoop/conf',
- logoutput = True,
- try_sleep = 3,
- tries = 5,
- bin_dir = '/usr/bin',
+ self.assertResourceCalled('HdfsResource', '/tmp/',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
+ action = ['delete_on_execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'file',
)
- self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
- logoutput = True,
- tries = 5,
- conf_dir = '/etc/hadoop/conf',
- bin_dir = '/usr/bin',
- try_sleep = 3,
+ self.assertResourceCalled('HdfsResource', '/tmp/',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ source = '/etc/passwd',
user = 'hdfs',
+ action = ['create_on_execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'file',
)
- self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
- logoutput = True,
- tries = 5,
- conf_dir = '/etc/hadoop/conf',
- bin_dir = '/usr/bin',
- try_sleep = 3,
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
)
+ self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index 5bedf5b..8630fcf 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -230,6 +230,9 @@ class TestSNamenode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
owner = 'hdfs',
group = 'hadoop',
@@ -276,6 +279,9 @@ class TestSNamenode(RMFTestCase):
content = Template('slaves.j2'),
owner = 'root',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index 8e7f34f..e34258b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -64,6 +64,9 @@ class TestZkfc(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
@@ -155,6 +158,9 @@ class TestZkfc(RMFTestCase):
content = Template('slaves.j2'),
owner = 'root',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
@@ -245,6 +251,9 @@ class TestZkfc(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
@@ -307,6 +316,9 @@ class TestZkfc(RMFTestCase):
content = Template('slaves.j2'),
owner = 'hdfs',
)
+ self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+ content = StaticFile('fast-hdfs-resource.jar'),
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 670092d..9a98a85 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -22,16 +22,19 @@ import socket
import subprocess
from mock.mock import MagicMock, patch
+from resource_management.libraries.functions import version
from resource_management.core import shell
-from resource_management.libraries.functions import dynamic_variable_interpretation
+from resource_management.libraries.script.script import Script
from stacks.utils.RMFTestCase import *
+
+@patch.object(version, "get_hdp_build_version", new = MagicMock(return_value="2.0.0.0-1234"))
@patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
class TestHiveServer(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
STACK_VERSION = "2.0.6"
UPGRADE_STACK_VERSION = "2.2"
-
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_configure_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -44,7 +47,7 @@ class TestHiveServer(RMFTestCase):
self.assertNoMoreResources()
@patch("socket.socket")
- @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_default(self, socket_mock):
s = socket_mock.return_value
@@ -83,9 +86,8 @@ class TestHiveServer(RMFTestCase):
)
self.assertNoMoreResources()
-
- @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
- def test_start_default_no_copy(self, get_tar_mock):
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+ def test_start_default_no_copy(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -95,7 +97,6 @@ class TestHiveServer(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- get_tar_mock.return_value = ("a", "b")
self.assert_configure_default()
self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -118,10 +119,9 @@ class TestHiveServer(RMFTestCase):
timeout = 30,
)
self.assertNoMoreResources()
- self.assertFalse(get_tar_mock.called)
- @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
- def test_start_default_alt_tmp(self, get_tar_mock):
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+ def test_start_default_alt_tmp(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
command = "start",
@@ -130,7 +130,6 @@ class TestHiveServer(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- get_tar_mock.return_value = ("a", "b")
self.assert_configure_default(no_tmp=True)
self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -153,11 +152,10 @@ class TestHiveServer(RMFTestCase):
timeout = 30,
)
self.assertNoMoreResources()
- self.assertFalse(get_tar_mock.called)
- @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
- def test_start_default_alt_nn_ha_tmp(self, get_tar_mock):
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+ def test_start_default_alt_nn_ha_tmp(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
command = "start",
@@ -166,7 +164,6 @@ class TestHiveServer(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- get_tar_mock.return_value = ("a", "b")
self.assert_configure_default(no_tmp=True)
self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -189,9 +186,8 @@ class TestHiveServer(RMFTestCase):
timeout = 30,
)
self.assertNoMoreResources()
- self.assertFalse(get_tar_mock.called)
- @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_stop_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -217,7 +213,7 @@ class TestHiveServer(RMFTestCase):
self.assertNoMoreResources()
-
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_configure_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -231,6 +227,7 @@ class TestHiveServer(RMFTestCase):
@patch("hive_service.check_fs_root")
@patch("socket.socket")
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_secured(self, socket_mock, check_fs_root_mock):
s = socket_mock.return_value
@@ -276,6 +273,7 @@ class TestHiveServer(RMFTestCase):
@patch("socket.socket")
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_stop_secured(self, socket_mock):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -302,51 +300,89 @@ class TestHiveServer(RMFTestCase):
self.assertNoMoreResources()
def assert_configure_default(self, no_tmp = False):
- self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
- security_enabled=False,
- keytab=UnknownConfigurationMock(),
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- mode=0777,
- owner='hive',
- bin_dir='/usr/bin',
- action=['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/hive',
- security_enabled=False,
- keytab=UnknownConfigurationMock(),
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- mode=0700,
- owner='hive',
- bin_dir='/usr/bin',
- action=['create_delayed'],
+ self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hcat',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/hcat',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hcat',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+ security_enabled = False,
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ keytab = UnknownConfigurationMock(),
+ source = '/usr/share/HDP-webhcat/hive.tar.gz',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['create_on_execute'],
+ group = 'hadoop',
+ hadoop_bin_dir = '/usr/bin',
+ type = 'file',
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hive',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/hive',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hive',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0700,
)
-
if not no_tmp:
- self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
- security_enabled=False,
- keytab=UnknownConfigurationMock(),
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- mode=0777,
- owner='hive',
- group='hdfs',
- action=['create_delayed'],
- bin_dir='/usr/bin',
+ self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
+ security_enabled = False,
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hive',
+ group = 'hdfs',
+ hadoop_bin_dir = '/usr/bin',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
)
-
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled=False,
- keytab=UnknownConfigurationMock(),
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- action=['create'],
- bin_dir='/usr/bin',
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/hive',
mode=0755,
@@ -453,49 +489,88 @@ class TestHiveServer(RMFTestCase):
def assert_configure_secured(self):
- self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
- security_enabled=True,
- keytab='/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- mode=0777,
- owner='hive',
- bin_dir='/usr/bin',
- action=['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/hive',
- security_enabled=True,
- keytab='/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- mode=0700,
- owner='hive',
- bin_dir='/usr/bin',
- action=['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
- security_enabled=True,
- keytab='/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- mode=0777,
- owner='hive',
- group='hdfs',
- action=['create_delayed'],
- bin_dir='/usr/bin',
- )
-
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled=True,
- keytab='/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs',
- kinit_path_local='/usr/bin/kinit',
- action=['create'],
- bin_dir='/usr/bin',
+ self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hcat',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/hcat',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hcat',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+ security_enabled = True,
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ source = '/usr/share/HDP-webhcat/hive.tar.gz',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['create_on_execute'],
+ group = 'hadoop',
+ hadoop_bin_dir = '/usr/bin',
+ type = 'file',
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hive',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', '/user/hive',
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hive',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0700,
+ )
+ self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
+ security_enabled = True,
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ owner = 'hive',
+ group = 'hdfs',
+ hadoop_bin_dir = '/usr/bin',
+ type = 'directory',
+ action = ['create_on_execute'],
+ mode = 0777,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/hive',
mode=0755,
@@ -624,6 +699,7 @@ class TestHiveServer(RMFTestCase):
self.assert_configure_default()
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
@patch("hive_server.HiveServer.pre_rolling_restart")
@patch("hive_server.HiveServer.start")
def test_stop_during_upgrade(self, hive_server_start_mock,
@@ -771,6 +847,7 @@ class TestHiveServer(RMFTestCase):
)
put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
def test_pre_rolling_restart(self):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
with open(config_file, "r") as f:
@@ -785,9 +862,32 @@ class TestHiveServer(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES)
self.assertResourceCalled('Execute',
'hdp-select set hive-server2 %s' % version,)
+ self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
+ security_enabled = False,
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ keytab = UnknownConfigurationMock(),
+ source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['create_on_execute'],
+ group = 'hadoop',
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ type = 'file',
+ mode = 0444,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ )
self.assertNoMoreResources()
@patch("resource_management.core.shell.call")
+ @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
def test_pre_rolling_restart_23(self, call_mock):
config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
with open(config_file, "r") as f:
@@ -807,6 +907,28 @@ class TestHiveServer(RMFTestCase):
self.assertResourceCalled('Execute',
'hdp-select set hive-server2 %s' % version,)
+ self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
+ security_enabled = False,
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ keytab = UnknownConfigurationMock(),
+ source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['create_on_execute'],
+ group = 'hadoop',
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ type = 'file',
+ mode = 0444,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+ )
self.assertNoMoreResources()
self.assertEquals(2, mocks_dict['call'].call_count)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 7bbd0a9..86d348f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -24,12 +24,13 @@ import datetime, sys, socket
import resource_management.libraries.functions
@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
@patch("socket.socket")
+@patch("time.time", new=MagicMock(return_value=1431110511.43))
class TestServiceCheck(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
STACK_VERSION = "2.0.6"
- @patch("sys.exit")
- def test_service_check_default(self, sys_exit_mock, socket_mock):
+
+ def test_service_check_default(self, socket_mock):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
classname="HiveServiceCheck",
@@ -82,16 +83,52 @@ class TestServiceCheck(RMFTestCase):
content = StaticFile('templetonSmoke.sh'),
mode = 0755,
)
- self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 no_keytab false /usr/bin/kinit no_principal',
- logoutput = True,
- path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
- tries = 3,
- try_sleep = 5,
- )
+ self.assertResourceCalled('File', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+ content = Template('templeton_smoke.pig.j2', templeton_test_input='/tmp/idtest.ambari-qa.1431110511.43.in', templeton_test_output='/tmp/idtest.ambari-qa.1431110511.43.out'),
+ )
+ self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ source = '/tmp/idtest.ambari-qa.1431110511.43.pig',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'file',
+ action = ['create_on_execute'],
+ )
+ self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ source = '/etc/passwd',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'file',
+ action = ['create_on_execute'],
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig no_keytab false /usr/bin/kinit no_principal',
+ logoutput = True,
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ tries = 3,
+ try_sleep = 5,
+ )
self.assertNoMoreResources()
- @patch("sys.exit")
- def test_service_check_secured(self, sys_exit_mock, socket_mock):
+
+ def test_service_check_secured(self, socket_mock):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
classname="HiveServiceCheck",
@@ -145,10 +182,47 @@ class TestServiceCheck(RMFTestCase):
content = StaticFile('templetonSmoke.sh'),
mode = 0755,
)
- self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
- logoutput = True,
- path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
- tries = 3,
- try_sleep = 5,
- )
+
+ self.assertResourceCalled('File', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+ content = Template('templeton_smoke.pig.j2', templeton_test_input='/tmp/idtest.ambari-qa.1431110511.43.in', templeton_test_output='/tmp/idtest.ambari-qa.1431110511.43.out'),
+ )
+ self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.pig',
+ action = ['create_on_execute'],
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ source = '/tmp/idtest.ambari-qa.1431110511.43.pig',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'file',
+ )
+ self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
+ action = ['create_on_execute'],
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ source = '/etc/passwd',
+ user = 'hdfs',
+ owner = 'ambari-qa',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'file',
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ security_enabled = True,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+ kinit_path_local = '/usr/bin/kinit',
+ user = 'hdfs',
+ action = ['execute'],
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
+ logoutput = True,
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ tries = 3,
+ try_sleep = 5,
+ )
self.assertNoMoreResources()
\ No newline at end of file