You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2014/12/09 19:43:41 UTC
[1/2] ambari git commit: AMBARI-8614. Run services as sudo on HDP2
(aonishuk)
Repository: ambari
Updated Branches:
refs/heads/trunk 45468b65a -> 6ed4fe450
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/1.3.2/OOZIE/test_oozie_server.py
index 0cabe63..7a0a8cc 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/OOZIE/test_oozie_server.py
@@ -114,18 +114,16 @@ class TestOozieServer(RMFTestCase):
recursive = True,
mode = 0755,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ self.assertResourceCalled('Directory', '/usr/lib/oozie/libext',
+ recursive = True,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ self.assertResourceCalled('Execute', ('tar', '-xvf', '/usr/lib/oozie/oozie-sharelib.tar.gz', '-C', '/usr/lib/oozie'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars `LZO_JARS=($(find /usr/lib/hadoop/lib/ -name "hadoop-lzo-*")); echo ${LZO_JARS[0]}`:',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- user = 'oozie',
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
)
self.assertNoMoreResources()
@@ -160,8 +158,12 @@ class TestOozieServer(RMFTestCase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', "su -s /bin/bash - oozie -c 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh' && rm -f /var/run/oozie/oozie.pid",
- only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh',
+ only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
+ self.assertResourceCalled('File', '/var/run/oozie/oozie.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -258,18 +260,16 @@ class TestOozieServer(RMFTestCase):
recursive = True,
mode = 0755,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ self.assertResourceCalled('Directory', '/usr/lib/oozie/libext',
+ recursive = True,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ self.assertResourceCalled('Execute', ('tar', '-xvf', '/usr/lib/oozie/oozie-sharelib.tar.gz', '-C', '/usr/lib/oozie'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars `LZO_JARS=($(find /usr/lib/hadoop/lib/ -name "hadoop-lzo-*")); echo ${LZO_JARS[0]}`:',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- user = 'oozie',
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
)
self.assertNoMoreResources()
@@ -303,8 +303,12 @@ class TestOozieServer(RMFTestCase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', "su -s /bin/bash - oozie -c 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh' && rm -f /var/run/oozie/oozie.pid",
- only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh',
+ only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
+ self.assertResourceCalled('File', '/var/run/oozie/oozie.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -395,19 +399,17 @@ class TestOozieServer(RMFTestCase):
recursive = True,
mode = 0755,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
+ self.assertResourceCalled('Directory', '/usr/lib/oozie/libext',
+ recursive = True,
+ )
+ self.assertResourceCalled('Execute', ('tar', '-xvf', '/usr/lib/oozie/oozie-sharelib.tar.gz', '-C', '/usr/lib/oozie'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars `LZO_JARS=($(find /usr/lib/hadoop/lib/ -name "hadoop-lzo-*")); echo ${LZO_JARS[0]}`:',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- user = 'oozie',
- )
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
def configure_secured(self):
self.assertResourceCalled('HdfsDirectory', '/user/oozie',
@@ -496,16 +498,14 @@ class TestOozieServer(RMFTestCase):
recursive = True,
mode = 0755,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
+ self.assertResourceCalled('Directory', '/usr/lib/oozie/libext',
+ recursive = True,
+ )
+ self.assertResourceCalled('Execute', ('tar', '-xvf', '/usr/lib/oozie/oozie-sharelib.tar.gz', '-C', '/usr/lib/oozie'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 /usr/lib/hadoop/ -extjs /usr/share/HDP-oozie/ext.zip -jars `LZO_JARS=($(find /usr/lib/hadoop/lib/ -name "hadoop-lzo-*")); echo ${LZO_JARS[0]}`:',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- user = 'oozie',
- )
\ No newline at end of file
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
index d7b3537..65d8656 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
@@ -51,14 +51,11 @@ class TestFlumeHandler(RMFTestCase):
self.assertTrue(set_desired_mock.called)
self.assertTrue(set_desired_mock.call_args[0][0] == 'STARTED')
- self.assertResourceCalled('Execute', format('su -s /bin/bash flume -c "export JAVA_HOME=/usr/jdk64/jdk1.7.0_45; /usr/bin/flume-ng agent '
- '--name a1 '
- '--conf /etc/flume/conf/a1 '
- '--conf-file /etc/flume/conf/a1/flume.conf '
- '-Dflume.monitoring.type=ganglia '
- '-Dflume.monitoring.hosts=c6401.ambari.apache.org:8655" &'),
- wait_for_finish = False)
+ self.assertResourceCalled('Execute', "/usr/bin/sudo su flume -l -s /bin/bash -c 'export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 > /dev/null ; /usr/bin/flume-ng agent --name a1 --conf /etc/flume/conf/a1 --conf-file /etc/flume/conf/a1/flume.conf -Dflume.monitoring.type=ganglia -Dflume.monitoring.hosts=c6401.ambari.apache.org:8655' &",
+ environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+ wait_for_finish = False,
+ )
self.assertResourceCalled('Execute', 'pgrep -o -u flume -f ^/usr/jdk64/jdk1.7.0_45.*a1.* > /var/run/flume/a1.pid',
logoutput = True,
tries = 20,
@@ -266,13 +263,11 @@ class TestFlumeHandler(RMFTestCase):
self.assert_configure_many()
- self.assertResourceCalled('Execute', format('su -s /bin/bash flume -c "export JAVA_HOME=/usr/jdk64/jdk1.7.0_45; /usr/bin/flume-ng agent '
- '--name b1 '
- '--conf /etc/flume/conf/b1 '
- '--conf-file /etc/flume/conf/b1/flume.conf '
- '-Dflume.monitoring.type=ganglia '
- '-Dflume.monitoring.hosts=c6401.ambari.apache.org:8655" &'),
- wait_for_finish = False)
+
+ self.assertResourceCalled('Execute', "/usr/bin/sudo su flume -l -s /bin/bash -c 'export JAVA_HOME=/usr/jdk64/jdk1.7.0_45 > /dev/null ; /usr/bin/flume-ng agent --name b1 --conf /etc/flume/conf/b1 --conf-file /etc/flume/conf/b1/flume.conf -Dflume.monitoring.type=ganglia -Dflume.monitoring.hosts=c6401.ambari.apache.org:8655' &",
+ environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+ wait_for_finish = False,
+ )
self.assertResourceCalled('Execute', 'pgrep -o -u flume -f ^/usr/jdk64/jdk1.7.0_45.*b1.* > /var/run/flume/b1.pid',
logoutput = True,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index 7a87493..a2d0443 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -59,8 +59,11 @@ class TestHiveMetastore(RMFTestCase):
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid',
- not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive.pid`',
+ not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -101,8 +104,11 @@ class TestHiveMetastore(RMFTestCase):
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid',
- not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive.pid`',
+ not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -189,10 +195,15 @@ class TestHiveMetastore(RMFTestCase):
owner = 'hive',
group = 'hadoop',
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; rm -f /usr/lib/hive/lib//mysql-connector-java.jar ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
path = ['/bin', '/usr/bin/'],
+ sudo = True,
not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
@@ -305,10 +316,15 @@ class TestHiveMetastore(RMFTestCase):
owner = 'hive',
group = 'hadoop',
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; rm -f /usr/lib/hive/lib//mysql-connector-java.jar ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
path = ['/bin', '/usr/bin/'],
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
+ sudo = True,
not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index c86059e..d0ba749 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -77,8 +77,11 @@ class TestHiveServer(RMFTestCase):
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
- not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
+ not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -135,8 +138,11 @@ class TestHiveServer(RMFTestCase):
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
- not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
+ not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -309,10 +315,15 @@ class TestHiveServer(RMFTestCase):
owner = 'hive',
group = 'hadoop',
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; rm -f /usr/lib/hive/lib//mysql-connector-java.jar ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
path = ['/bin', '/usr/bin/'],
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
+ sudo = True,
not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
@@ -456,10 +467,15 @@ class TestHiveServer(RMFTestCase):
owner = 'hive',
group = 'hadoop',
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; rm -f /usr/lib/hive/lib//mysql-connector-java.jar ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
path = ['/bin', '/usr/bin/'],
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
+ sudo = True,
not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_mysql_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_mysql_server.py
index 1d35e97..9c1b27a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_mysql_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_mysql_server.py
@@ -38,11 +38,16 @@ class TestMySqlServer(RMFTestCase):
config_file="default.json"
)
- self.assertResourceCalled('Execute', "sed -i 's|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|' /etc/my.cnf",
+ self.assertResourceCalled('Execute', ('sed',
+ '-i',
+ 's|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|',
+ '/etc/my.cnf'),
+ sudo = True,
)
self.assertResourceCalled('Execute', 'service mysql start',
logoutput = True,
- not_if = 'service mysql status | grep running'
+ not_if = 'service mysql status | grep running',
+ user = 'mysql',
)
self.assertNoMoreResources()
@@ -55,6 +60,7 @@ class TestMySqlServer(RMFTestCase):
self.assertResourceCalled('Execute', 'service mysql stop',
logoutput = True,
only_if = 'service mysql status | grep running',
+ user = 'mysql',
)
self.assertNoMoreResources()
@@ -75,11 +81,16 @@ class TestMySqlServer(RMFTestCase):
config_file="secured.json"
)
- self.assertResourceCalled('Execute', "sed -i 's|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|' /etc/my.cnf",
+ self.assertResourceCalled('Execute', ('sed',
+ '-i',
+ 's|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|',
+ '/etc/my.cnf'),
+ sudo = True,
)
self.assertResourceCalled('Execute', 'service mysql start',
logoutput = True,
- not_if = 'service mysql status | grep running'
+ not_if = 'service mysql status | grep running',
+ user = 'mysql',
)
self.assertNoMoreResources()
@@ -92,7 +103,8 @@ class TestMySqlServer(RMFTestCase):
self.assertResourceCalled('Execute', 'service mysql stop',
logoutput = True,
- only_if = 'service mysql status | grep running'
+ only_if = 'service mysql status | grep running',
+ user = 'mysql',
)
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index caa6045..fa06253 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -57,7 +57,9 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
user = 'hcat',
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+ self.assertResourceCalled('File', '/var/run/webhcat/webhcat.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def test_configure_secured(self):
@@ -94,7 +96,9 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
user = 'hcat',
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+ self.assertResourceCalled('File', '/var/run/webhcat/webhcat.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def assert_configure_default(self):
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 3c30153..c85b060 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -63,8 +63,12 @@ class TestOozieServer(RMFTestCase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', "su -s /bin/bash - oozie -c 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh' && rm -f /var/run/oozie/oozie.pid",
- only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh',
+ only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
+ self.assertResourceCalled('File', '/var/run/oozie/oozie.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -109,9 +113,13 @@ class TestOozieServer(RMFTestCase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', "su -s /bin/bash - oozie -c 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh' && rm -f /var/run/oozie/oozie.pid",
- only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
+ self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh',
+ only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
+ self.assertResourceCalled('File', '/var/run/oozie/oozie.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
@@ -174,6 +182,12 @@ class TestOozieServer(RMFTestCase):
action=["delete"],
not_if="ls {pid_file} >/dev/null 2>&1 && !(ps `cat {pid_file}` >/dev/null 2>&1)"
)
+ self.assertResourceCalled('Directory', '/usr/lib/oozie//var/tmp/oozie',
+ owner = 'oozie',
+ group = 'hadoop',
+ recursive = True,
+ mode = 0755,
+ )
self.assertResourceCalled('Directory', '/var/run/oozie',
owner = 'oozie',
group = 'hadoop',
@@ -228,19 +242,35 @@ class TestOozieServer(RMFTestCase):
recursive = True,
mode = 0755,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie && mkdir -p /usr/lib/oozie/libext && cp /usr/share/HDP-oozie/ext-2.2.zip /usr/lib/oozie/libext && chown oozie:hadoop /usr/lib/oozie/libext/ext-2.2.zip && chown -RL oozie:hadoop /var/lib/oozie/oozie-server/conf && cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext && chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
+ self.assertResourceCalled('Directory', '/usr/lib/oozie/libext',
+ recursive = True,
+ )
+ self.assertResourceCalled('Execute', ('tar', '-xvf', '/usr/lib/oozie/oozie-sharelib.tar.gz', '-C', '/usr/lib/oozie'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', ('cp', '/usr/share/HDP-oozie/ext-2.2.zip', '/usr/lib/oozie/libext'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', ('chown', u'oozie:hadoop', '/usr/lib/oozie/libext/ext-2.2.zip'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', ('chown', '-RL', u'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', 'sudo cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ )
+ self.assertResourceCalled('Execute', 'sudo chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ )
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- user = 'oozie',
- )
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
def assert_configure_secured(self):
@@ -302,7 +332,12 @@ class TestOozieServer(RMFTestCase):
action=["delete"],
not_if="ls {pid_file} >/dev/null 2>&1 && !(ps `cat {pid_file}` >/dev/null 2>&1)"
)
-
+ self.assertResourceCalled('Directory', '/usr/lib/oozie//var/tmp/oozie',
+ owner = 'oozie',
+ group = 'hadoop',
+ recursive = True,
+ mode = 0755,
+ )
self.assertResourceCalled('Directory', '/var/run/oozie',
owner = 'oozie',
group = 'hadoop',
@@ -357,19 +392,35 @@ class TestOozieServer(RMFTestCase):
recursive = True,
mode = 0755,
)
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie && mkdir -p /usr/lib/oozie/libext && cp /usr/share/HDP-oozie/ext-2.2.zip /usr/lib/oozie/libext && chown oozie:hadoop /usr/lib/oozie/libext/ext-2.2.zip && chown -RL oozie:hadoop /var/lib/oozie/oozie-server/conf && cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext && chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- )
+ self.assertResourceCalled('Directory', '/usr/lib/oozie/libext',
+ recursive = True,
+ )
+ self.assertResourceCalled('Execute', ('tar', '-xvf', '/usr/lib/oozie/oozie-sharelib.tar.gz', '-C', '/usr/lib/oozie'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', ('cp', '/usr/share/HDP-oozie/ext-2.2.zip', '/usr/lib/oozie/libext'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', ('chown', u'oozie:hadoop', '/usr/lib/oozie/libext/ext-2.2.zip'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', ('chown', '-RL', u'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ sudo = True,
+ )
+ self.assertResourceCalled('Execute', 'sudo cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ )
+ self.assertResourceCalled('Execute', 'sudo chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ )
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
- not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
- user = 'oozie',
- )
+ not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
+ user = 'oozie',
+ )
def test_configure_default_hdp22(self):
config_file = "stacks/2.0.6/configs/default.json"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 9a3c04b..fef7464 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -61,8 +61,11 @@ class TestHiveMetastore(RMFTestCase):
config_file="../../2.1/configs/default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid',
- not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive.pid`',
+ not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -103,8 +106,11 @@ class TestHiveMetastore(RMFTestCase):
config_file="../../2.1/configs/secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid',
- not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive.pid`',
+ not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -167,10 +173,15 @@ class TestHiveMetastore(RMFTestCase):
owner = 'hive',
group = 'hadoop',
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; rm -f /usr/lib/hive/lib//mysql-connector-java.jar ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
path = ['/bin', '/usr/bin/'],
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
+ sudo = True,
not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
@@ -262,10 +273,15 @@ class TestHiveMetastore(RMFTestCase):
owner = 'hive',
group = 'hadoop',
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; rm -f /usr/lib/hive/lib//mysql-connector-java.jar ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
path = ['/bin', '/usr/bin/'],
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
+ sudo = True,
not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
index d5512e3..2527e69 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
@@ -66,14 +66,16 @@ class TestStormDrpcServer(TestStormBase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/drpc.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/drpc.pid`',
+ not_if = '! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)',
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/drpc.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/drpc.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/drpc.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/drpc.pid')
self.assertNoMoreResources()
def test_configure_secured(self):
@@ -115,12 +117,14 @@ class TestStormDrpcServer(TestStormBase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/drpc.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/drpc.pid`',
+ not_if = '! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/drpc.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/drpc.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/drpc.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/drpc.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('File', '/var/run/storm/drpc.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/drpc.pid')
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
index 4f53b44..3b82e22 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
@@ -65,14 +65,16 @@ class TestStormNimbus(TestStormBase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/nimbus.pid`',
+ not_if = '! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)',
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/nimbus.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/nimbus.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/nimbus.pid')
self.assertNoMoreResources()
def test_configure_secured(self):
@@ -114,13 +116,15 @@ class TestStormNimbus(TestStormBase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/nimbus.pid`',
+ not_if = '! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/nimbus.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/nimbus.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/nimbus.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('File', '/var/run/storm/nimbus.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/nimbus.pid')
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_rest_api_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_rest_api_service.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_rest_api_service.py
index 195fa43..a818313 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_rest_api_service.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_rest_api_service.py
@@ -64,14 +64,16 @@ class TestStormRestApi(TestStormBase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/restapi.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/restapi.pid`',
+ not_if = '! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)',
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/restapi.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/restapi.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/restapi.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/restapi.pid')
self.assertNoMoreResources()
def test_configure_secured(self):
@@ -114,13 +116,15 @@ class TestStormRestApi(TestStormBase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/restapi.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/restapi.pid`',
+ not_if = '! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/restapi.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/restapi.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/restapi.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/restapi.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('File', '/var/run/storm/restapi.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/restapi.pid')
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
index 73801bf..b968b1b 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
@@ -77,22 +77,26 @@ class TestStormSupervisor(TestStormBase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/supervisor.pid`',
+ not_if = '! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)',
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/supervisor.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/supervisor.pid',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/logviewer.pid`',
+ not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/logviewer.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/logviewer.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/supervisor.pid')
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)')
-
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
- ignore_failures = True)
-
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/logviewer.pid')
self.assertNoMoreResources()
def test_configure_secured(self):
@@ -148,21 +152,25 @@ class TestStormSupervisor(TestStormBase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)'
- )
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)',
- ignore_failures=True
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/supervisor.pid`',
+ not_if = '! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/supervisor.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/supervisor.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/supervisor.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/supervisor.pid',
+ action = ['delete'],
+ )
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/logviewer.pid`',
+ not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/logviewer.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/logviewer.pid',
+ action = ['delete'],
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/supervisor.pid')
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)')
-
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
- ignore_failures = True)
-
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/logviewer.pid')
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
index 36a7ef5..2822833 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
@@ -70,16 +70,16 @@ class TestStormSupervisor(TestStormBase):
self.assertResourceCalled('Execute', 'supervisorctl stop storm-supervisor',
wait_for_finish = False,
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/logviewer.pid`',
+ not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
- ignore_failures = True,
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/logviewer.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/logviewer.pid',
+ self.assertResourceCalled('File', '/var/run/storm/logviewer.pid',
+ action = ['delete'],
)
-
self.assertNoMoreResources()
def test_configure_default(self):
@@ -128,14 +128,16 @@ class TestStormSupervisor(TestStormBase):
self.assertResourceCalled('Execute', 'supervisorctl stop storm-supervisor',
wait_for_finish = False,
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/logviewer.pid`',
+ not_if = '! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
)
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
- ignore_failures = True,
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/logviewer.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/logviewer.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/logviewer.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/logviewer.pid',
+ self.assertResourceCalled('File', '/var/run/storm/logviewer.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
index 85e8f2f..5b6123a 100644
--- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
@@ -63,14 +63,16 @@ class TestStormUiServer(TestStormBase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/ui.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)'
- )
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/ui.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)',
- ignore_failures=True
- )
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/ui.pid')
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/ui.pid`',
+ not_if = '! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/ui.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/ui.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def test_configure_secured(self):
@@ -113,12 +115,14 @@ class TestStormUiServer(TestStormBase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/storm/ui.pid` >/dev/null 2>&1',
- not_if = '! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)',
- )
- self.assertResourceCalled('Execute', 'kill -9 `cat /var/run/storm/ui.pid` >/dev/null 2>&1',
- not_if = 'sleep 2; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)',
- ignore_failures=True
- )
- self.assertResourceCalled('Execute', 'rm -f /var/run/storm/ui.pid')
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/storm/ui.pid`',
+ not_if = '! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/storm/ui.pid`',
+ not_if = 'sleep 2; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1) || sleep 20; ! (ls /var/run/storm/ui.pid >/dev/null 2>&1 && ps -p `cat /var/run/storm/ui.pid` >/dev/null 2>&1)',
+ ignore_failures = True,
+ )
+ self.assertResourceCalled('File', '/var/run/storm/ui.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
index f9c6897..7dfaa83 100644
--- a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
+++ b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
@@ -53,17 +53,25 @@ class TestKnoxGateway(RMFTestCase):
owner = 'knox',
content = InlineTemplate(self.getConfig()['configurations']['topology']['content'])
)
- self.assertResourceCalled('Execute', 'chown -R knox:knox /var/lib/knox/data /var/log/knox /var/run/knox /etc/knox/conf'
+ self.assertResourceCalled('Execute', ('chown',
+ '-R',
+ 'knox:knox',
+ '/var/lib/knox/data',
+ '/var/log/knox',
+ '/var/run/knox',
+ '/etc/knox/conf'),
+ sudo = True,
)
self.assertResourceCalled('Execute', '/usr/lib/knox/bin/knoxcli.sh create-master --master sa',
- user='knox',
- environment={'JAVA_HOME': '/usr/jdk64/jdk1.7.0_45'},
- not_if='test -f /var/lib/knox/data/security/master'
+ environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+ not_if = "/usr/bin/sudo su knox -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; test -f /var/lib/knox/data/security/master'",
+ user = 'knox',
)
self.assertResourceCalled('Execute', '/usr/lib/knox/bin/knoxcli.sh create-cert --hostname c6401.ambari.apache.org',
- user='knox',
- environment={'JAVA_HOME': '/usr/jdk64/jdk1.7.0_45'},
- not_if='test -f /var/lib/knox/data/security/keystores/gateway.jks'
+ environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+ not_if = "/usr/bin/sudo su knox -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; test -f /var/lib/knox/data/security/keystores/gateway.jks'",
+ user = 'knox',
)
self.assertResourceCalled('File', '/etc/knox/conf/ldap-log4j.properties',
mode=0644,
[2/2] ambari git commit: AMBARI-8614. Run services as sudo on HDP2
(aonishuk)
Posted by ao...@apache.org.
AMBARI-8614. Run services as sudo on HDP2 (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6ed4fe45
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6ed4fe45
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6ed4fe45
Branch: refs/heads/trunk
Commit: 6ed4fe4508cf7b1927bacca29a5b85f89b86746a
Parents: 45468b6
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Dec 9 20:39:38 2014 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Dec 9 20:39:38 2014 +0200
----------------------------------------------------------------------
ambari-agent/conf/unix/ambari-agent | 1 +
.../resource_management/TestCopyFromLocal.py | 5 +-
.../TestExecuteHadoopResource.py | 17 ++-
.../libraries/providers/copy_from_local.py | 9 +-
.../libraries/providers/execute_hadoop.py | 3 +-
.../libraries/resources/execute_hadoop.py | 1 +
.../ambari/server/state/ConfigHelper.java | 4 +-
.../services/HIVE/package/files/addMysqlUser.sh | 20 ++--
.../HIVE/package/files/templetonSmoke.sh | 10 +-
.../1.3.2/services/HIVE/package/scripts/hive.py | 6 +-
.../HIVE/package/scripts/hive_service.py | 9 +-
.../HIVE/package/scripts/mysql_service.py | 15 +--
.../services/HIVE/package/scripts/params.py | 2 +
.../HIVE/package/scripts/webhcat_service.py | 4 +-
.../MAPREDUCE/package/scripts/service.py | 5 +-
.../services/OOZIE/package/files/oozieSmoke.sh | 26 ++---
.../services/OOZIE/package/scripts/oozie.py | 29 ++---
.../OOZIE/package/scripts/oozie_service.py | 10 +-
.../services/OOZIE/package/scripts/params.py | 1 +
.../ZOOKEEPER/package/files/zkService.sh | 2 +-
.../services/FLUME/package/scripts/flume.py | 19 ++--
.../FLUME/package/scripts/flume_handler.py | 2 +-
.../services/HIVE/package/files/addMysqlUser.sh | 9 +-
.../HIVE/package/files/templetonSmoke.sh | 10 +-
.../2.0.6/services/HIVE/package/scripts/hive.py | 12 +-
.../HIVE/package/scripts/hive_service.py | 7 +-
.../HIVE/package/scripts/mysql_service.py | 16 ++-
.../services/HIVE/package/scripts/params.py | 2 +
.../HIVE/package/scripts/webhcat_service.py | 4 +-
.../services/OOZIE/package/files/oozieSmoke2.sh | 30 ++---
.../services/OOZIE/package/scripts/oozie.py | 48 +++++---
.../OOZIE/package/scripts/oozie_service.py | 10 +-
.../ZOOKEEPER/package/files/zkService.sh | 2 +-
.../services/STORM/package/scripts/service.py | 10 +-
.../2.1/services/STORM/package/scripts/storm.py | 3 +-
.../2.2/services/KNOX/package/scripts/knox.py | 19 +++-
.../KNOX/package/scripts/knox_gateway.py | 6 +-
.../stacks/1.3.2/HIVE/test_hive_metastore.py | 36 ++++--
.../stacks/1.3.2/HIVE/test_hive_server.py | 38 ++++---
.../stacks/1.3.2/HIVE/test_mysql_server.py | 41 ++++---
.../stacks/1.3.2/HIVE/test_webhcat_server.py | 8 +-
.../MAPREDUCE/test_mapreduce_historyserver.py | 8 +-
.../MAPREDUCE/test_mapreduce_jobtracker.py | 8 +-
.../MAPREDUCE/test_mapreduce_tasktracker.py | 8 +-
.../stacks/1.3.2/OOZIE/test_oozie_server.py | 92 +++++++--------
.../python/stacks/2.0.6/FLUME/test_flume.py | 23 ++--
.../stacks/2.0.6/HIVE/test_hive_metastore.py | 32 ++++--
.../stacks/2.0.6/HIVE/test_hive_server.py | 32 ++++--
.../stacks/2.0.6/HIVE/test_mysql_server.py | 22 +++-
.../stacks/2.0.6/HIVE/test_webhcat_server.py | 8 +-
.../stacks/2.0.6/OOZIE/test_oozie_server.py | 111 ++++++++++++++-----
.../stacks/2.1/HIVE/test_hive_metastore.py | 32 ++++--
.../stacks/2.1/STORM/test_storm_drpc_server.py | 28 +++--
.../stacks/2.1/STORM/test_storm_nimbus.py | 28 +++--
.../2.1/STORM/test_storm_rest_api_service.py | 28 +++--
.../stacks/2.1/STORM/test_storm_supervisor.py | 66 ++++++-----
.../2.1/STORM/test_storm_supervisor_prod.py | 28 ++---
.../stacks/2.1/STORM/test_storm_ui_server.py | 36 +++---
.../python/stacks/2.2/KNOX/test_knox_gateway.py | 22 ++--
59 files changed, 696 insertions(+), 427 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-agent/conf/unix/ambari-agent
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent b/ambari-agent/conf/unix/ambari-agent
index 09808f3..30acd65 100755
--- a/ambari-agent/conf/unix/ambari-agent
+++ b/ambari-agent/conf/unix/ambari-agent
@@ -61,6 +61,7 @@ sudo chown -R $current_user "/var/run/ambari-agent"
sudo chown -R $current_user "/var/log/ambari-agent"
sudo chown -R $current_user "/var/lib/ambari-agent/data"
sudo chown -R $current_user "/var/lib/ambari-agent/cache"
+sudo chown $current_user "/usr/lib/ambari-agent"
if [ -a /usr/bin/python2.7 ] && [ -z "$PYTHON" ]; then
PYTHON=/usr/bin/python2.7
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
index 9f53ebe..8383e69 100644
--- a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
+++ b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
@@ -38,7 +38,8 @@ class TestCopyFromLocal(TestCase):
call_arg_list = execute_hadoop_mock.call_args_list
self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
call_arg_list[0][0][0].command)
- self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 'export PATH=/usr/bin > /dev/null ; {kinnit_if_needed} ; hadoop fs -ls {dest_path}'", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
+ print call_arg_list[0][0][0].arguments
+ self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
call_arg_list[0][0][0].arguments)
self.assertEquals('fs -chown user1 /apps/test//*.files', call_arg_list[1][0][0].command)
self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
@@ -59,7 +60,7 @@ class TestCopyFromLocal(TestCase):
call_arg_list = execute_hadoop_mock.call_args_list
self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
call_arg_list[0][0][0].command)
- self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 'export PATH=/usr/bin > /dev/null ; {kinnit_if_needed} ; hadoop fs -ls {dest_path}'", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
+ self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; PATH=$PATH:/usr/bin hadoop fs -ls /apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
call_arg_list[0][0][0].arguments)
self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', call_arg_list[1][0][0].command)
self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
index f5308d1..e4fe8df 100644
--- a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
@@ -16,7 +16,6 @@ See the License for the specific language governing permissions and
limitations under the License.
'''
-from stacks.utils.RMFTestCase import *
import os
from unittest import TestCase
@@ -45,7 +44,8 @@ class TestExecuteHadoopResource(TestCase):
'tries': 1,
'user': 'user',
'try_sleep': 0,
- 'path': []})
+ 'path': [],
+ 'environment': {}})
@patch("resource_management.core.providers.system.ExecuteProvider")
@@ -69,7 +69,8 @@ class TestExecuteHadoopResource(TestCase):
'tries': 1,
'user': 'user',
'try_sleep': 0,
- 'path': []})
+ 'path': [],
+ 'environment': {}})
@patch("resource_management.core.providers.system.ExecuteProvider")
@@ -98,7 +99,8 @@ class TestExecuteHadoopResource(TestCase):
'tries': 2,
'user': 'user',
'try_sleep': 2,
- 'path': []})
+ 'path': [],
+ 'environment': {}})
@patch("resource_management.core.providers.system.ExecuteProvider")
@@ -123,6 +125,7 @@ class TestExecuteHadoopResource(TestCase):
{'logoutput': False,
'tries': 1,
'user': 'user',
+ 'environment': {},
'try_sleep': 0,
'path': []})
self.assertEqual(execute_mock.call_args_list[1][0][0].arguments,
@@ -130,7 +133,8 @@ class TestExecuteHadoopResource(TestCase):
'tries': 1,
'user': 'user',
'try_sleep': 0,
- 'path': []})
+ 'path': [],
+ 'environment': {}})
@patch("resource_management.core.providers.system.ExecuteProvider")
@@ -183,7 +187,8 @@ class TestExecuteHadoopResource(TestCase):
'tries': 1,
'user': 'user',
'try_sleep': 0,
- 'path': []})
+ 'path': [],
+ 'environment': {}})
@patch("resource_management.core.providers.system.ExecuteProvider")
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
index d854c55..dbd54a7 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
@@ -46,8 +46,13 @@ class CopyFromLocalProvider(Provider):
copy_cmd = format("fs -copyFromLocal {path} {dest_dir}")
dest_path = dest_dir + os.sep + dest_file_name
# Need to run unless as resource user
- unless_cmd = as_user("{kinnit_if_needed} ; hadoop fs -ls {dest_path}", owner, env={'PATH':bin_dir})
-
+
+ if kinnit_if_needed:
+ Execute(kinnit_if_needed,
+ user=owner,
+ )
+
+ unless_cmd = as_user(format("PATH=$PATH:{bin_dir} hadoop fs -ls {dest_path}"), owner)
ExecuteHadoop(copy_cmd,
not_if=unless_cmd,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
index efba0a0..c3ec7c7 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
@@ -47,5 +47,6 @@ class ExecuteHadoopProvider(Provider):
tries = self.resource.tries,
try_sleep = self.resource.try_sleep,
logoutput = self.resource.logoutput,
- path = self.resource.bin_dir
+ path = self.resource.bin_dir,
+ environment = self.resource.environment,
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
index ca66ce4..8b61331 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
@@ -33,6 +33,7 @@ class ExecuteHadoop(Resource):
logoutput = BooleanArgument(default=False)
principal = ResourceArgument(default=lambda obj: obj.user)
bin_dir = ResourceArgument(default=[]) # appended to $PATH
+ environment = ResourceArgument(default={})
conf_dir = ResourceArgument()
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 3bc1db7..42f9601 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -450,7 +450,9 @@ public class ConfigHelper {
for (PropertyInfo serviceProperty : serviceProperties) {
if(serviceProperty.getPropertyTypes().contains(propertyType)) {
String stackPropertyConfigType = fileNameToConfigType(serviceProperty.getFilename());
- result.add(cluster.getDesiredConfigByType(stackPropertyConfigType).getProperties().get(serviceProperty.getName()));
+ try {
+ result.add(cluster.getDesiredConfigByType(stackPropertyConfigType).getProperties().get(serviceProperty.getName()));
+ } catch(Exception ex) {}
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/addMysqlUser.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/addMysqlUser.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/addMysqlUser.sh
index 8d31b91..e8925e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/addMysqlUser.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/addMysqlUser.sh
@@ -26,16 +26,16 @@ mysqldbpasswd=$3
mysqldbhost=$4
myhostname=$(hostname -f)
-service $mysqldservice start
+sudo su mysql -s /bin/bash - -c "service $mysqldservice start"
echo "Adding user $mysqldbuser@$mysqldbhost and $mysqldbuser@localhost"
-mysql -u root -e "CREATE USER '$mysqldbuser'@'$mysqldbhost' IDENTIFIED BY '$mysqldbpasswd';"
-mysql -u root -e "CREATE USER '$mysqldbuser'@'localhost' IDENTIFIED BY '$mysqldbpasswd';"
-mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$mysqldbhost';"
-mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'localhost';"
-if [ '$(mysql -u root -e "select user from mysql.user where user='$mysqldbuser' and host='$myhostname'" | grep "$mysqldbuser")' != '0' ]; then
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"CREATE USER '$mysqldbuser'@'$mysqldbhost' IDENTIFIED BY '$mysqldbpasswd';\""
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"CREATE USER '$mysqldbuser'@'localhost' IDENTIFIED BY '$mysqldbpasswd';\""
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$mysqldbhost';\""
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'localhost';\""
+if [ '$(sudo su mysql -s /bin/bash - -c "mysql -u root -e \"select user from mysql.user where user='$mysqldbuser' and host='$myhostname'\" | grep \"$mysqldbuser\"")' != '0' ]; then
echo "Adding user $mysqldbuser@$myhostname";
- mysql -u root -e "CREATE USER '$mysqldbuser'@'$myhostname' IDENTIFIED BY '$mysqldbpasswd';";
- mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$myhostname';";
+ sudo su mysql -s /bin/bash - -c "mysql -u root -e \"CREATE USER '$mysqldbuser'@'$myhostname' IDENTIFIED BY '$mysqldbpasswd';\";"
+ sudo su mysql -s /bin/bash - -c "mysql -u root -e \"GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$myhostname';\";"
fi
-mysql -u root -e "flush privileges;"
-service $mysqldservice stop
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"flush privileges;\""
+sudo su mysql -s /bin/bash - -c "service $mysqldservice stop"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
index 53dd717..662142d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
@@ -35,7 +35,7 @@ fi
export no_proxy=$ttonhost
cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' $ttonurl/status 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
if [[ "$httpExitCode" -ne "200" ]] ; then
@@ -49,7 +49,7 @@ exit 0
#try hcat ddl command
echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d \@${destdir}/show_db.post.txt $ttonurl/ddl 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
if [[ "$httpExitCode" -ne "200" ]] ; then
@@ -75,17 +75,17 @@ echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
#copy pig script to hdfs
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
#copy input file to hdfs
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
#create, copy post args file
echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
#submit pig query
cmd="curl -s -w 'http_code <%{http_code}>' -d \@${destdir}/pig_post.txt $ttonurl/pig 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
if [[ "$httpExitCode" -ne "200" ]] ; then
echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
index 48aa62b..6d35f9f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
@@ -162,12 +162,14 @@ def jdbc_connector():
import params
if params.hive_jdbc_driver == "com.mysql.jdbc.Driver":
- cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
+ cmd = ('cp', format('/usr/share/java/{jdbc_jar_name}'), params.target)
Execute(cmd,
not_if=format("test -f {target}"),
creates=params.target,
- path=["/bin", "/usr/bin/"])
+ path=["/bin", "/usr/bin/"],
+ sudo=True
+ )
elif params.hive_jdbc_driver == "org.postgresql.Driver":
cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py
index df78aad..c68391a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_service.py
@@ -84,12 +84,15 @@ def hive_service(
elapsed_time = time.time() - start_time
if is_service_socket_valid == False:
- raise Fail("Connection to Hive server %s on port %s failed after %d seconds" % (address, port, elapsed_time))
- raise Fail("Connection to Hive server %s on port %s failed after %d seconds" % (address, port, elapsed_time))
print "Successfully connected to Hive at %s on port %s after %d seconds" % (address, port, elapsed_time)
elif action == 'stop':
- demon_cmd = format("kill `cat {pid_file}` >/dev/null 2>&1 && rm -f {pid_file}")
+ demon_cmd = format("sudo kill `cat {pid_file}`")
Execute(demon_cmd,
- not_if = format("! ({process_id_exists})")
+ not_if = format("! ({process_id_exists})")
+ )
+ File(pid_file,
+ action = "delete",
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py
index cfb3e08..8447d60 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/mysql_service.py
@@ -25,14 +25,15 @@ def mysql_service(daemon_name=None, action='start'):
cmd = format('service {daemon_name} {action}')
if action == 'status':
- logoutput = False
+ Execute(cmd,
+ path="/usr/local/bin/:/bin/:/sbin/",
+ logoutput=False)
else:
- logoutput = True
-
- Execute(cmd,
- path="/usr/local/bin/:/bin/:/sbin/",
- tries=1,
- logoutput=logoutput)
+ import params
+ Execute(cmd,
+ path="/usr/local/bin/:/bin/:/sbin/",
+ user=params.mysql_user,
+ logoutput=True)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
index 69babb0..04b1c97 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
@@ -87,6 +87,8 @@ start_hiveserver2_script = 'startHiveserver2.sh'
hadoop_home = '/usr'
+mysql_user = 'mysql'
+
##Starting metastore
start_metastore_script = 'startMetastore.sh'
hive_metastore_pid = status_params.hive_metastore_pid
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
index dda07c0..fbad9fa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
@@ -37,4 +37,6 @@ def webhcat_service(action='start'):
Execute(demon_cmd,
user=params.webhcat_user
)
- Execute(format('rm -f {webhcat_pid_file}'))
+ File(params.webhcat_pid_file,
+ action="delete",
+ )
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/service.py
index 9358123..1244472 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/service.py
@@ -47,9 +47,10 @@ def service(
)
elif action == 'stop':
daemon_cmd = format("{cmd} stop {name}")
- rm_pid = format("rm -f {pid_file}")
Execute(daemon_cmd,
user=params.mapred_user
)
- Execute(rm_pid)
+ File(pid_file,
+ action = "delete",
+ )
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh
index e7ff4af..68bc22c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/files/oozieSmoke.sh
@@ -32,9 +32,9 @@ function checkOozieJobStatus {
local i=0
local rc=1
local cmd="source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie job -oozie ${OOZIE_SERVER} -info $job_id"
- su -s /bin/bash - ${smoke_test_user} -c "$cmd"
+ sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"
while [ $i -lt $num_of_tries ] ; do
- cmd_output=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+ cmd_output=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
(IFS='';echo $cmd_output)
act_status=$(IFS='';echo $cmd_output | grep ^Status | cut -d':' -f2 | sed 's| ||g')
echo "workflow_status=$act_status"
@@ -67,12 +67,12 @@ export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.ba
export OOZIE_EXAMPLES_DIR=`rpm -ql oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
cd $OOZIE_EXAMPLES_DIR
-tar -zxf oozie-examples.tar.gz
-sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
-sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
-sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
+sudo tar -zxf oozie-examples.tar.gz
+sudo sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
if [[ $security_enabled == "True" ]]; then
kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smoke_test_user}; "
@@ -80,13 +80,13 @@ else
kinitcmd=""
fi
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -rmr examples"
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -rmr input-data"
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -rmr examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -rmr input-data"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties -run"
-job_info=`su -s /bin/bash - ${smoke_test_user} -c "$cmd" | grep "job:"`
+job_info=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
job_id="`echo $job_info | cut -d':' -f2`"
checkOozieJobStatus "$job_id"
OOZIE_EXIT_CODE="$?"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
index db070b4..72d6bb1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
@@ -128,23 +128,26 @@ def oozie_server_specific(
mode = 0755,
recursive = True
)
-
- cmd1 = "cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz"
- cmd2 = format("cd /usr/lib/oozie && mkdir -p {oozie_tmp_dir}")
- # this is different for HDP2
- cmd3 = format("cd /usr/lib/oozie && chown {oozie_user}:{user_group} {oozie_tmp_dir}")
- if params.jdbc_driver_name=="com.mysql.jdbc.Driver" or params.jdbc_driver_name=="oracle.jdbc.driver.OracleDriver":
- cmd3 += format(" && mkdir -p {oozie_libext_dir} && cp {jdbc_driver_jar} {oozie_libext_dir}")
-
- # this is different for HDP2
- cmd4 = format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 {hadoop_jar_location} -extjs {ext_js_path} {jar_option} {jar_path}")
+ Directory(params.oozie_libext_dir,
+ recursive=True,
+ )
no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
- Execute( [cmd1, cmd2, cmd3],
- not_if = no_op_test
+
+ Execute(('tar','-xvf', format('{oozie_home}/oozie-sharelib.tar.gz'), '-C', params.oozie_home),
+ not_if = no_op_test,
+ sudo = True
)
- Execute( cmd4,
+
+ if params.jdbc_driver_name=="com.mysql.jdbc.Driver" or params.jdbc_driver_name=="oracle.jdbc.driver.OracleDriver":
+ Execute(('cp', params.jdbc_driver_jar, params.oozie_libext_dir),
+ not_if = no_op_test,
+ sudo = True
+ )
+
+ oozie_setup_cmd = format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 {hadoop_jar_location} -extjs {ext_js_path} {jar_option} {jar_path}")
+ Execute( oozie_setup_cmd,
user = params.oozie_user,
not_if = no_op_test
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py
index 9e89a62..6f94e01 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_service.py
@@ -58,9 +58,13 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
not_if = no_op_test,
)
elif action == 'stop':
- stop_cmd = format("su -s /bin/bash - {oozie_user} -c 'cd {oozie_tmp_dir} && /usr/lib/oozie/bin/oozie-stop.sh' && rm -f {pid_file}")
- Execute( stop_cmd,
- only_if = no_op_test
+ stop_cmd = format("cd {oozie_tmp_dir} && {oozie_home}/bin/oozie-stop.sh")
+ Execute(stop_cmd,
+ only_if = no_op_test,
+ user = params.oozie_user
+ )
+ File(params.pid_file,
+ action = "delete",
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
index 4c53c4d..deb7549 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
@@ -42,6 +42,7 @@ pid_file = status_params.pid_file
hadoop_jar_location = "/usr/lib/hadoop/"
# for HDP2 it's "/usr/share/HDP-oozie/ext-2.2.zip"
ext_js_path = "/usr/share/HDP-oozie/ext.zip"
+oozie_home = "/usr/lib/oozie"
oozie_libext_dir = "/usr/lib/oozie/libext"
lzo_enabled = config['configurations']['mapred-env']['lzo_enabled']
security_enabled = config['configurations']['cluster-env']['security_enabled']
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/files/zkService.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/files/zkService.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/files/zkService.sh
index 56ce676..33c8161 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/files/zkService.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/files/zkService.sh
@@ -23,4 +23,4 @@
zkcli_script=$1
user=$2
conf_dir=$3
-su -s /bin/bash - $user -c "source $conf_dir/zookeeper-env.sh ; echo 'ls /' | $zkcli_script"
+sudo su $user -s /bin/bash - -c "source $conf_dir/zookeeper-env.sh ; echo 'ls /' | $zkcli_script"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
index 0ebbee5..9b1dfe1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
@@ -71,8 +71,9 @@ def flume(action = None):
_set_desired_state('STARTED')
# It is important to run this command as a background process.
- flume_base = format('su -s /bin/bash {flume_user} -c "export JAVA_HOME={java_home}; '
- '{flume_bin} agent --name {{0}} --conf {{1}} --conf-file {{2}} {{3}}" &')
+
+
+ flume_base = as_user(format("{flume_bin} agent --name {{0}} --conf {{1}} --conf-file {{2}} {{3}}"), params.flume_user, env={'JAVA_HOME': params.java_home}) + " &"
for agent in cmd_target_names():
flume_agent_conf_dir = params.flume_conf_dir + os.sep + agent
@@ -92,7 +93,10 @@ def flume(action = None):
flume_cmd = flume_base.format(agent, flume_agent_conf_dir,
flume_agent_conf_file, extra_args)
- Execute(flume_cmd, wait_for_finish=False)
+ Execute(flume_cmd,
+ wait_for_finish=False,
+ environment={'JAVA_HOME': params.java_home}
+ )
# sometimes startup spawns a couple of threads - so only the first line may count
pid_cmd = format('pgrep -o -u {flume_user} -f ^{java_home}.*{agent}.* > {flume_agent_pid_file}')
@@ -177,11 +181,10 @@ def cmd_target_names():
def _set_desired_state(state):
import params
- try:
- with open(os.path.join(params.flume_run_dir, 'ambari-state.txt'), 'w') as fp:
- fp.write(state)
- except:
- pass
+ filename = os.path.join(params.flume_run_dir, 'ambari-state.txt')
+ File(filename,
+ content = state,
+ )
def get_desired_state():
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
index 66e40d5..ade2cf5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_handler.py
@@ -55,9 +55,9 @@ class FlumeHandler(Script):
def status(self, env):
import params
-
env.set_params(params)
+
processes = get_flume_status(params.flume_conf_dir, params.flume_run_dir)
expected_agents = find_expected_agent_names(params.flume_conf_dir)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/addMysqlUser.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/addMysqlUser.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/addMysqlUser.sh
index d3ae6e4..f366055 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/addMysqlUser.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/addMysqlUser.sh
@@ -27,7 +27,8 @@ userhost=$4
service $mysqldservice start
echo "Adding user $mysqldbuser@$userhost and $mysqldbuser@localhost"
-mysql -u root -e "CREATE USER '$mysqldbuser'@'$userhost' IDENTIFIED BY '$mysqldbpasswd';"
-mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$userhost';"
-mysql -u root -e "flush privileges;"
-service $mysqldservice stop
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"CREATE USER '$mysqldbuser'@'$userhost' IDENTIFIED BY '$mysqldbpasswd';\""
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$userhost';\""
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"flush privileges;\""
+
+sudo su mysql -s /bin/bash - -c "service $mysqldservice stop"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
index e26148b..22202ee 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
@@ -35,7 +35,7 @@ fi
export no_proxy=$ttonhost
cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' $ttonurl/status 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
if [[ "$httpExitCode" -ne "200" ]] ; then
@@ -49,7 +49,7 @@ exit 0
#try hcat ddl command
echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d \@${destdir}/show_db.post.txt $ttonurl/ddl 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
if [[ "$httpExitCode" -ne "200" ]] ; then
@@ -75,17 +75,17 @@ echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
#copy pig script to hdfs
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
#copy input file to hdfs
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
#create, copy post args file
echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
#submit pig query
cmd="curl -s -w 'http_code <%{http_code}>' -d \@${destdir}/pig_post.txt $ttonurl/pig 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
if [[ "$httpExitCode" -ne "200" ]] ; then
echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
index a5220cb..d8ee70d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
@@ -188,13 +188,15 @@ def jdbc_connector():
import params
if params.hive_jdbc_driver == "com.mysql.jdbc.Driver":
- cmd = format("hive mkdir -p {artifact_dir} ; rm -f {target} ; cp /usr/share/java/{jdbc_jar_name} {target}")
-
- Execute(cmd,
+ File(params.target,
+ action="delete",
+ )
+ Execute(('cp', format('/usr/share/java/{jdbc_jar_name}'), params.target),
not_if=format("test -f {target}"),
creates=params.target,
- environment= {'PATH' : params.execute_path },
- path=["/bin", "/usr/bin/"])
+ path=["/bin", "/usr/bin/"],
+ sudo=True
+ )
elif params.hive_jdbc_driver == "org.postgresql.Driver":
cmd = format("hive mkdir -p {artifact_dir} ; rm -f {target} ; cp /usr/share/java/{jdbc_jar_name} {target}")
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
index 84cb1a4..8e5d878 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
@@ -95,9 +95,12 @@ def hive_service(
print "Successfully connected to Hive at %s on port %s after %d seconds" % (address, port, elapsed_time)
elif action == 'stop':
- demon_cmd = format("kill `cat {pid_file}` >/dev/null 2>&1 && rm -f {pid_file}")
+ demon_cmd = format("sudo kill `cat {pid_file}`")
Execute(demon_cmd,
- not_if = format("! ({process_id_exists})")
+ not_if = format("! ({process_id_exists})")
+ )
+ File(pid_file,
+ action = "delete",
)
def check_fs_root():
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_service.py
index 11bbdd8..8c72174 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/mysql_service.py
@@ -21,25 +21,31 @@ limitations under the License.
from resource_management import *
-def mysql_service(daemon_name=None, action='start'):
+def mysql_service(daemon_name=None, action='start'):
status_cmd = format('service {daemon_name} status | grep running')
cmd = format('service {daemon_name} {action}')
if action == 'status':
Execute(status_cmd)
elif action == 'stop':
+ import params
Execute(cmd,
logoutput = True,
- only_if = status_cmd
+ only_if = status_cmd,
+ user = params.mysql_user
)
elif action == 'start':
+ import params
# required for running hive
- replace_bind_address = format("sed -i 's|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|' {mysql_configname}")
- Execute(replace_bind_address)
+ replace_bind_address = ('sed','-i','s|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|',params.mysql_configname)
+ Execute(replace_bind_address,
+ sudo = True,
+ )
Execute(cmd,
logoutput = True,
- not_if = status_cmd
+ not_if = status_cmd,
+ user = params.mysql_user
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index f85a381..698fd78 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -227,6 +227,8 @@ if System.get_instance().os_family == "ubuntu":
mysql_configname = '/etc/mysql/my.cnf'
else:
mysql_configname = '/etc/my.cnf'
+
+mysql_user = 'mysql'
# Hive security
hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
index 644d554..25a60e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
@@ -37,4 +37,6 @@ def webhcat_service(action='start'):
Execute(demon_cmd,
user=params.webhcat_user
)
- Execute(format('rm -f {webhcat_pid_file}'))
+ File(params.webhcat_pid_file,
+ action="delete",
+ )
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
index 954f13b..30d878c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
@@ -36,9 +36,9 @@ function checkOozieJobStatus {
local i=0
local rc=1
local cmd="source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie job -oozie ${OOZIE_SERVER} -info $job_id"
- su -s /bin/bash - ${smoke_test_user} -c "$cmd"
+ sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"
while [ $i -lt $num_of_tries ] ; do
- cmd_output=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
+ cmd_output=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
(IFS='';echo $cmd_output)
act_status=$(IFS='';echo $cmd_output | grep ^Status | cut -d':' -f2 | sed 's| ||g')
echo "workflow_status=$act_status"
@@ -84,15 +84,15 @@ if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
fi
cd $OOZIE_EXAMPLES_DIR
-tar -zxf oozie-examples.tar.gz
-chmod -R o+rx examples
+sudo tar -zxf oozie-examples.tar.gz
+sudo chmod -R o+rx examples
-sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
-sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
-sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+sudo sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
if [[ $security_enabled == "True" ]]; then
kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smoke_test_user}; "
@@ -100,14 +100,14 @@ else
kinitcmd=""
fi
-su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
-su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
-su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties -run"
echo $cmd
-job_info=`su -s /bin/bash - ${smoke_test_user} -c "$cmd" | grep "job:"`
+job_info=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
job_id="`echo $job_info | cut -d':' -f2`"
checkOozieJobStatus "$job_id" 15
OOZIE_EXIT_CODE="$?"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
index 5ea4ebc..3e8b71b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
@@ -128,36 +128,50 @@ def oozie_server_specific():
not_if="ls {pid_file} >/dev/null 2>&1 && !(ps `cat {pid_file}` >/dev/null 2>&1)"
)
- oozie_server_directorties = [params.oozie_pid_dir, params.oozie_log_dir, params.oozie_tmp_dir, os.path.abspath(os.path.join(params.oozie_data_dir, "..")), params.oozie_data_dir, params.oozie_lib_dir, params.oozie_webapps_dir, params.oozie_webapps_conf_dir, params.oozie_server_dir]
+ oozie_server_directorties = [format("{oozie_home}/{oozie_tmp_dir}"), params.oozie_pid_dir, params.oozie_log_dir, params.oozie_tmp_dir, os.path.abspath(os.path.join(params.oozie_data_dir, "..")), params.oozie_data_dir, params.oozie_lib_dir, params.oozie_webapps_dir, params.oozie_webapps_conf_dir, params.oozie_server_dir]
Directory( oozie_server_directorties,
owner = params.oozie_user,
group = params.user_group,
mode = 0755,
recursive = True
)
-
- cmd1 = format("cd {oozie_home} && tar -xvf oozie-sharelib.tar.gz")
- cmd2 = format("cd {oozie_home} && mkdir -p {oozie_tmp_dir}")
- # this is different for HDP1
- cmd3 = format("cd {oozie_home} && chown {oozie_user}:{user_group} {oozie_tmp_dir} && mkdir -p {oozie_libext_dir} && cp {ext_js_path} {oozie_libext_dir} && chown {oozie_user}:{user_group} {oozie_libext_dir}/{ext_js_file} && chown -RL {oozie_user}:{user_group} {oozie_webapps_conf_dir}")
+ Directory(params.oozie_libext_dir,
+ recursive=True,
+ )
+
+ configure_cmds = []
+ configure_cmds.append(('tar','-xvf',format('{oozie_home}/oozie-sharelib.tar.gz'),'-C',params.oozie_home))
+ configure_cmds.append(('cp', params.ext_js_path, params.oozie_libext_dir))
+ configure_cmds.append(('chown', format('{oozie_user}:{user_group}'), format('{oozie_libext_dir}/{ext_js_file}')))
+ configure_cmds.append(('chown', '-RL', format('{oozie_user}:{user_group}'), params.oozie_webapps_conf_dir))
+
+ no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
+ Execute( configure_cmds,
+ not_if = no_op_test,
+ sudo = True,
+ )
+
if params.jdbc_driver_name=="com.mysql.jdbc.Driver" or params.jdbc_driver_name=="oracle.jdbc.driver.OracleDriver":
- cmd3 += format(" && cp {jdbc_driver_jar} {oozie_libext_dir}")
+ Execute(('cp', params.jdbc_driver_jar, params.oozie_libext_dir),
+ not_if = no_op_test,
+ sudo = True,
+ )
#falcon el extension
if params.has_falcon_host:
- cmd3 += format(' && cp {falcon_home}/oozie/ext/falcon-oozie-el-extension-*.jar {oozie_libext_dir} && chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar')
+ Execute(format('sudo cp {falcon_home}/oozie/ext/falcon-oozie-el-extension-*.jar {oozie_libext_dir}'),
+ not_if = no_op_test,
+ )
+ Execute(format('sudo chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar'),
+ not_if = no_op_test,
+ )
if params.lzo_enabled:
Package(params.lzo_packages_for_current_host)
+ Execute(format('sudo cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'),
+ not_if = no_op_test,
+ )
- cmd3 += format(' && cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}')
- # this is different for HDP1
- cmd4 = format("cd {oozie_tmp_dir} && {oozie_setup_sh} prepare-war")
-
- no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
- Execute( [cmd1, cmd2, cmd3],
- not_if = no_op_test
- )
- Execute( cmd4,
+ Execute(format("cd {oozie_tmp_dir} && {oozie_setup_sh} prepare-war"),
user = params.oozie_user,
not_if = no_op_test
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
index 93cfda9..a1d7bad 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
@@ -67,9 +67,13 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
not_if = no_op_test,
)
elif action == 'stop':
- stop_cmd = format("su -s /bin/bash - {oozie_user} -c 'cd {oozie_tmp_dir} && {oozie_home}/bin/oozie-stop.sh' && rm -f {pid_file}")
- Execute( stop_cmd,
- only_if = no_op_test
+ stop_cmd = format("cd {oozie_tmp_dir} && {oozie_home}/bin/oozie-stop.sh")
+ Execute(stop_cmd,
+ only_if = no_op_test,
+ user = params.oozie_user
+ )
+ File(params.pid_file,
+ action = "delete",
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/files/zkService.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/files/zkService.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/files/zkService.sh
index 6e167a4..46296df 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/files/zkService.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/files/zkService.sh
@@ -23,4 +23,4 @@
zkcli_script=$1
user=$2
conf_dir=$3
-su -s /bin/bash - $user -c "source $conf_dir/zookeeper-env.sh ; echo 'ls /' | $zkcli_script"
+sudo su $user -s /bin/bash - -c "source $conf_dir/zookeeper-env.sh ; echo 'ls /' | $zkcli_script"
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/service.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/service.py
index 8ad4b36..ed6add5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/service.py
@@ -71,12 +71,14 @@ def service(
elif action == "stop":
process_dont_exist = format("! ({no_op_test})")
- pid = format("`cat {pid_file}` >/dev/null 2>&1")
- Execute(format("kill {pid}"),
+ pid = format("`cat {pid_file}`")
+ Execute(format("sudo kill {pid}"),
not_if=process_dont_exist
)
- Execute(format("kill -9 {pid}"),
+ Execute(format("sudo kill -9 {pid}"),
not_if=format("sleep 2; {process_dont_exist} || sleep 20; {process_dont_exist}"),
ignore_failures=True
)
- Execute(format("rm -f {pid_file}"))
+ File(pid_file,
+ action = "delete",
+ )
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/storm.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/storm.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/storm.py
index bb065da..0462f97 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/storm.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/storm.py
@@ -21,7 +21,6 @@ limitations under the License.
from resource_management import *
from yaml_utils import escape_yaml_propetry
import sys
-from ambari_agent.AgentException import AgentException
def storm():
import params
@@ -89,4 +88,4 @@ def _find_real_user_min_uid():
for line in f:
if line.strip().startswith('UID_MIN') and len(line.split()) == 2 and line.split()[1].isdigit():
return int(line.split()[1])
- raise AgentException ("Unable to find UID_MIN in file /etc/login.defs. Expecting format e.g.: 'UID_MIN 500'")
+ raise Fail("Unable to find UID_MIN in file /etc/login.defs. Expecting format e.g.: 'UID_MIN 500'")
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox.py
index 627462a..7d7d20c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox.py
@@ -55,20 +55,31 @@ def knox():
owner = params.knox_user,
template_tag = None
)
- cmd = format('chown -R {knox_user}:{knox_group} {knox_data_dir} {knox_logs_dir} {knox_pid_dir} {knox_conf_dir}')
- Execute(cmd)
+
+ dirs_to_chown = (params.knox_data_dir, params.knox_logs_dir, params.knox_logs_dir, params.knox_pid_dir, params.knox_conf_dir)
+ cmd = ('chown','-R',format('{knox_user}:{knox_group}'))+dirs_to_chown
+ Execute(cmd,
+ sudo = True,
+ )
+
+ #File([params.knox_data_dir, params.knox_logs_dir, params.knox_logs_dir, params.knox_pid_dir, params.knox_conf_dir],
+ # owner = params.knox_user,
+ # group = params.knox_group
+ #)
cmd = format('{knox_client_bin} create-master --master {knox_master_secret!p}')
+ master_secret_exist = as_user(format('test -f {knox_master_secret_path}'), params.knox_user)
+
Execute(cmd,
user=params.knox_user,
environment={'JAVA_HOME': params.java_home},
- not_if=format('test -f {knox_master_secret_path}')
+ not_if=master_secret_exist,
)
cmd = format('{knox_client_bin} create-cert --hostname {knox_host_name_in_cluster}')
Execute(cmd,
user=params.knox_user,
environment={'JAVA_HOME': params.java_home},
- not_if=format('test -f {knox_cert_store_path}')
+ not_if=master_secret_exist,
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox_gateway.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox_gateway.py
index 7f47cd3..f578926 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox_gateway.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/knox_gateway.py
@@ -28,8 +28,10 @@ class KnoxGateway(Script):
self.install_packages(env)
import params
env.set_params(params)
- cmd = format('rm -f {knox_conf_dir}/topologies/sandbox.xml')
- Execute(cmd)
+
+ File(format('{knox_conf_dir}/topologies/sandbox.xml'),
+ action = "delete",
+ )
def configure(self, env):
import params
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
index 46b3098..eaf01e7 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
@@ -85,8 +85,11 @@ class TestHiveMetastore(RMFTestCase):
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid',
- not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive.pid`',
+ not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -153,16 +156,22 @@ class TestHiveMetastore(RMFTestCase):
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid',
- not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive.pid`',
+ not_if = '! (ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
def assert_configure_default(self):
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
- creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
- path = ['/bin', '/usr/bin/'],
- not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', '/usr/bin/'],
+ sudo = True,
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Directory', '/etc/hive/conf',
owner = 'hive',
@@ -223,10 +232,13 @@ class TestHiveMetastore(RMFTestCase):
)
def assert_configure_secured(self):
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
- creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
- path = ['/bin', '/usr/bin/'],
- not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', '/usr/bin/'],
+ sudo = True,
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Directory', '/etc/hive/conf',
owner = 'hive',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
index 7d43b61..abccc72 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
@@ -92,9 +92,12 @@ class TestHiveServer(RMFTestCase):
command = "stop",
config_file="default.json"
)
-
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
- not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
+
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
+ not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -172,8 +175,11 @@ class TestHiveServer(RMFTestCase):
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
- not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
+ self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
+ not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
+ )
+ self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
+ action = ['delete'],
)
self.assertNoMoreResources()
@@ -208,10 +214,13 @@ class TestHiveServer(RMFTestCase):
kinit_path_local = "/usr/bin/kinit",
action = ['create'],
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
- creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
- path = ['/bin', '/usr/bin/'],
- not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', '/usr/bin/'],
+ sudo = True,
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Directory', '/etc/hive/conf',
owner = 'hive',
@@ -300,10 +309,13 @@ class TestHiveServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
action = ['create'],
)
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
- creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
- path = ['/bin', '/usr/bin/'],
- not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ self.assertResourceCalled('Execute', ('cp',
+ '/usr/share/java/mysql-connector-java.jar',
+ '/usr/lib/hive/lib//mysql-connector-java.jar'),
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', '/usr/bin/'],
+ sudo = True,
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
)
self.assertResourceCalled('Directory', '/etc/hive/conf',
owner = 'hive',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_mysql_server.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_mysql_server.py
index 304c54a..e8961fe 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_mysql_server.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_mysql_server.py
@@ -38,9 +38,9 @@ class TestMySqlServer(RMFTestCase):
)
self.assertResourceCalled('Execute', 'service mysql start',
- logoutput = True,
- path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
+ logoutput = True,
+ path = ['/usr/local/bin/:/bin/:/sbin/'],
+ user = 'mysql',
)
self.assertNoMoreResources()
@@ -52,9 +52,9 @@ class TestMySqlServer(RMFTestCase):
)
self.assertResourceCalled('Execute', 'service mysql stop',
- logoutput = True,
- path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
+ logoutput = True,
+ path = ['/usr/local/bin/:/bin/:/sbin/'],
+ user = 'mysql',
)
self.assertNoMoreResources()
@@ -76,10 +76,10 @@ class TestMySqlServer(RMFTestCase):
)
self.assertResourceCalled('Execute', 'service mysql start',
- logoutput = True,
- path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
- )
+ logoutput = True,
+ path = ['/usr/local/bin/:/bin/:/sbin/'],
+ user = 'mysql',
+ )
self.assertNoMoreResources()
def test_stop_secured(self):
@@ -88,19 +88,18 @@ class TestMySqlServer(RMFTestCase):
command = "stop",
config_file="secured.json"
)
-
self.assertResourceCalled('Execute', 'service mysql stop',
- logoutput = True,
- path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
- )
+ logoutput = True,
+ path = ['/usr/local/bin/:/bin/:/sbin/'],
+ user = 'mysql',
+ )
self.assertNoMoreResources()
def assert_configure_default(self):
self.assertResourceCalled('Execute', 'service mysql start',
- logoutput = True,
- path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
+ logoutput = True,
+ path = ['/usr/local/bin/:/bin/:/sbin/'],
+ user = 'mysql',
)
self.assertResourceCalled('File', '/tmp/addMysqlUser.sh',
content = StaticFile('addMysqlUser.sh'),
@@ -114,14 +113,14 @@ class TestMySqlServer(RMFTestCase):
self.assertResourceCalled('Execute', 'service mysql stop',
logoutput = True,
path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
+ user = 'mysql',
)
def assert_configure_secured(self):
self.assertResourceCalled('Execute', 'service mysql start',
logoutput = True,
path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
+ user = 'mysql',
)
self.assertResourceCalled('File', '/tmp/addMysqlUser.sh',
content = StaticFile('addMysqlUser.sh'),
@@ -135,5 +134,5 @@ class TestMySqlServer(RMFTestCase):
self.assertResourceCalled('Execute', 'service mysql stop',
logoutput = True,
path = ['/usr/local/bin/:/bin/:/sbin/'],
- tries = 1,
+ user = 'mysql',
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
index d509d33..e6fb576 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
@@ -55,7 +55,9 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
user = 'hcat',
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+ self.assertResourceCalled('File', '/var/run/webhcat/webhcat.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def test_configure_secured(self):
@@ -92,7 +94,9 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
user = 'hcat',
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+ self.assertResourceCalled('File', '/var/run/webhcat/webhcat.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def assert_configure_default(self):
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
index 573b2d2..4e6fb45 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
@@ -65,7 +65,9 @@ class TestHistoryServer(RMFTestCase):
self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop historyserver',
user = 'mapred'
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop/mapred/hadoop-mapred-historyserver.pid')
+ self.assertResourceCalled('File', '/var/run/hadoop/mapred/hadoop-mapred-historyserver.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
@@ -108,7 +110,9 @@ class TestHistoryServer(RMFTestCase):
self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop historyserver',
user = 'mapred'
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop/mapred/hadoop-mapred-historyserver.pid')
+ self.assertResourceCalled('File', '/var/run/hadoop/mapred/hadoop-mapred-historyserver.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def assert_configure_default(self):
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
index 7816a61..64ed904 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
@@ -61,7 +61,9 @@ class TestJobtracker(RMFTestCase):
self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop jobtracker',
user = 'mapred'
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop/mapred/hadoop-mapred-jobtracker.pid')
+ self.assertResourceCalled('File', '/var/run/hadoop/mapred/hadoop-mapred-jobtracker.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def test_decommission_default(self):
@@ -139,7 +141,9 @@ class TestJobtracker(RMFTestCase):
self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop jobtracker',
user = 'mapred'
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop/mapred/hadoop-mapred-jobtracker.pid')
+ self.assertResourceCalled('File', '/var/run/hadoop/mapred/hadoop-mapred-jobtracker.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def test_decommission_secured(self):
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ed4fe45/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
index 462bd11..9e380bb 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
@@ -64,7 +64,9 @@ class TestTasktracker(RMFTestCase):
self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop tasktracker',
user = 'mapred'
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop/mapred/hadoop-mapred-tasktracker.pid')
+ self.assertResourceCalled('File', '/var/run/hadoop/mapred/hadoop-mapred-tasktracker.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
@@ -106,7 +108,9 @@ class TestTasktracker(RMFTestCase):
self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop tasktracker',
user = 'mapred'
)
- self.assertResourceCalled('Execute', 'rm -f /var/run/hadoop/mapred/hadoop-mapred-tasktracker.pid')
+ self.assertResourceCalled('File', '/var/run/hadoop/mapred/hadoop-mapred-tasktracker.pid',
+ action = ['delete'],
+ )
self.assertNoMoreResources()
def assert_configure_default(self):