You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2014/01/31 20:50:32 UTC
[07/51] [partial] AMBARI-4491. Move all the supported versions in
Baikal for stack to python code (remove dependence on puppet). (aonishuk)
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/scripts/zookeeper_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/scripts/zookeeper_service.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/scripts/zookeeper_service.py
deleted file mode 100644
index 83b8f08..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/scripts/zookeeper_service.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python2.6
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-
-def zookeeper_service(action='start'):
- import params
-
- cmd = format("env ZOOCFGDIR={config_dir} ZOOCFG=zoo.cfg {zk_bin}/zkServer.sh")
-
- if action == 'start':
- daemon_cmd = format("source {config_dir}/zookeeper-env.sh ; {cmd} start")
- no_op_test = format("ls {zk_pid_file} >/dev/null 2>&1 && ps `cat {zk_pid_file}` >/dev/null 2>&1")
- Execute(daemon_cmd,
- not_if=no_op_test,
- user=params.zk_user
- )
- elif action == 'stop':
- daemon_cmd = format("source {config_dir}/zookeeper-env.sh ; {cmd} stop")
- rm_pid = format("rm -f {zk_pid_file}")
- Execute(daemon_cmd,
- user=params.zk_user
- )
- Execute(rm_pid)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/configuration.xsl.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/configuration.xsl.j2 b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/configuration.xsl.j2
deleted file mode 100644
index c003ba2..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/configuration.xsl.j2
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0"?>
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
-<xsl:output method="html"/>
-<xsl:template match="configuration">
-<html>
-<body>
-<table border="1">
-<tr>
- <td>name</td>
- <td>value</td>
- <td>description</td>
-</tr>
-<xsl:for-each select="property">
- <tr>
- <td><a name="{name}"><xsl:value-of select="name"/></a></td>
- <td><xsl:value-of select="value"/></td>
- <td><xsl:value-of select="description"/></td>
- </tr>
-</xsl:for-each>
-</table>
-</body>
-</html>
-</xsl:template>
-</xsl:stylesheet>
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zoo.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zoo.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zoo.cfg.j2
deleted file mode 100644
index 5b68218..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zoo.cfg.j2
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#
-
-# The number of milliseconds of each tick
-tickTime={{tickTime}}
-# The number of ticks that the initial
-# synchronization phase can take
-initLimit={{initLimit}}
-# The number of ticks that can pass between
-# sending a request and getting an acknowledgement
-syncLimit={{syncLimit}}
-# the directory where the snapshot is stored.
-dataDir={{zk_data_dir}}
-# the port at which the clients will connect
-clientPort={{clientPort}}
-{% for host in zookeeper_hosts %}
-server.{{loop.index}}={{host}}:2888:3888
-{% endfor %}
-
-{% if security_enabled %}
-authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
-jaasLoginRenew=3600000
-kerberos.removeHostFromPrincipal=true
-kerberos.removeRealmFromPrincipal=true
-{% endif %}
-
-{% if zoo_cfg_properties_map_length > 0 %}
-# Custom properties
-{% endif %}
-{% for key, value in zoo_cfg_properties_map.iteritems() %}
-{{key}}={{value}}
-{% endfor %}
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper-env.sh.j2
deleted file mode 100644
index 493a2a4..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper-env.sh.j2
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-export JAVA_HOME={{java64_home}}
-export ZOO_LOG_DIR={{zk_log_dir}}
-export ZOOPIDFILE={{zk_pid_file}}
-export SERVER_JVMFLAGS={{zk_server_heapsize}}
-export JAVA=$JAVA_HOME/bin/java
-export CLASSPATH=$CLASSPATH:/usr/share/zookeeper/*
-
-{% if security_enabled %}
-export SERVER_JVMFLAGS="$SERVER_JVMFLAGS -Djava.security.auth.login.config={{zk_server_jaas_file}}"
-export CLIENT_JVMFLAGS="$CLIENT_JVMFLAGS -Djava.security.auth.login.config={{zk_client_jaas_file}}"
-{% endif %}
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_client_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_client_jaas.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_client_jaas.conf.j2
deleted file mode 100644
index 696718e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_client_jaas.conf.j2
+++ /dev/null
@@ -1,5 +0,0 @@
-Client {
-com.sun.security.auth.module.Krb5LoginModule required
-useKeyTab=false
-useTicketCache=true;
-};
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_jaas.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_jaas.conf.j2
deleted file mode 100644
index aa123e1..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/ZOOKEEPER/package/templates/zookeeper_jaas.conf.j2
+++ /dev/null
@@ -1,8 +0,0 @@
-Server {
-com.sun.security.auth.module.Krb5LoginModule required
-useKeyTab=true
-storeKey=true
-useTicketCache=false
-keyTab="{{zk_keytab_path}}"
-principal="{{zk_principal}}";
-};
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_monitor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_monitor.py b/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_monitor.py
new file mode 100644
index 0000000..a1ba41c
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_monitor.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+
+class TestGangliaMonitor(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/GANGLIA/package/scripts/ganglia_monitor.py",
+ classname="GangliaMonitor",
+ command="configure",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Group', 'hadoop',
+ )
+ self.assertResourceCalled('Group', 'nobody',
+ )
+ self.assertResourceCalled('Group', 'nobody',
+ )
+ self.assertResourceCalled('User', 'nobody',
+ groups = [u'nobody'],
+ )
+ self.assertResourceCalled('User', 'nobody',
+ groups = [u'nobody'],
+ )
+ self.assertResourceCalled('Directory', '/etc/ganglia/hdp',
+ owner = 'root',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/usr/libexec/hdp/ganglia',
+ owner = 'root',
+ group = 'root',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmetad',
+ content = StaticFile('gmetad.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmond',
+ content = StaticFile('gmond.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkGmond.sh',
+ content = StaticFile('checkGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkRrdcached.sh',
+ content = StaticFile('checkRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmetadLib.sh',
+ content = StaticFile('gmetadLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmondLib.sh',
+ content = StaticFile('gmondLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/rrdcachedLib.sh',
+ content = StaticFile('rrdcachedLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/setupGanglia.sh',
+ content = StaticFile('setupGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmetad.sh',
+ content = StaticFile('startGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmond.sh',
+ content = StaticFile('startGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startRrdcached.sh',
+ content = StaticFile('startRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmetad.sh',
+ content = StaticFile('stopGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmond.sh',
+ content = StaticFile('stopGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopRrdcached.sh',
+ content = StaticFile('stopRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/teardownGanglia.sh',
+ content = StaticFile('teardownGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaClusters.conf',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaEnv.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaLib.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHistoryServer -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Directory', '/etc/ganglia/conf.d',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/conf.d/modgstatus.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/conf.d/multicpu.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/gmond.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("1.3.2/services/GANGLIA/package/scripts/ganglia_monitor.py",
+ classname="GangliaMonitor",
+ command="start",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'chkconfig gmond off',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmond start >> /tmp/gmond.log 2>&1 ; /bin/ps auwx | /bin/grep [g]mond >> /tmp/gmond.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("1.3.2/services/GANGLIA/package/scripts/ganglia_monitor.py",
+ classname="GangliaMonitor",
+ command="stop",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmond stop >> /tmp/gmond.log 2>&1 ; /bin/ps auwx | /bin/grep [g]mond >> /tmp/gmond.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertNoMoreResources()
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_server.py b/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_server.py
new file mode 100644
index 0000000..61f3735
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_server.py
@@ -0,0 +1,226 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+
+class TestGangliaServer(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/GANGLIA/package/scripts/ganglia_server.py",
+ classname="GangliaServer",
+ command="configure",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Group', 'hadoop',
+ )
+ self.assertResourceCalled('Group', 'nobody',
+ )
+ self.assertResourceCalled('Group', 'nobody',
+ )
+ self.assertResourceCalled('User', 'nobody',
+ groups = [u'nobody'],
+ )
+ self.assertResourceCalled('User', 'nobody',
+ groups = [u'nobody'],
+ )
+ self.assertResourceCalled('Directory', '/usr/libexec/hdp/ganglia',
+ owner = 'root',
+ group = 'root',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmetad',
+ content = StaticFile('gmetad.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmond',
+ content = StaticFile('gmond.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkGmond.sh',
+ content = StaticFile('checkGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkRrdcached.sh',
+ content = StaticFile('checkRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmetadLib.sh',
+ content = StaticFile('gmetadLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmondLib.sh',
+ content = StaticFile('gmondLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/rrdcachedLib.sh',
+ content = StaticFile('rrdcachedLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/setupGanglia.sh',
+ content = StaticFile('setupGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmetad.sh',
+ content = StaticFile('startGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmond.sh',
+ content = StaticFile('startGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startRrdcached.sh',
+ content = StaticFile('startRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmetad.sh',
+ content = StaticFile('stopGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmond.sh',
+ content = StaticFile('stopGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopRrdcached.sh',
+ content = StaticFile('stopRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/teardownGanglia.sh',
+ content = StaticFile('teardownGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaClusters.conf',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaEnv.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaLib.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPJobTracker -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHistoryServer -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPDataNode -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPTaskTracker -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseRegionServer -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -t -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Directory', '/var/lib/ganglia/dwoo',
+ owner = 'nobody',
+ recursive = True,
+ mode = 0777,
+ )
+ self.assertResourceCalled('Directory', '/srv/www/cgi-bin',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/srv/www/cgi-bin/rrd.py',
+ content = StaticFile('rrd.py'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/gmetad.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("1.3.2/services/GANGLIA/package/scripts/ganglia_server.py",
+ classname="GangliaServer",
+ command="start",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmetad start >> /tmp/gmetad.log 2>&1 ; /bin/ps auwx | /bin/grep [g]metad >> /tmp/gmetad.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertResourceCalled('MonitorWebserver', 'restart',
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("1.3.2/services/GANGLIA/package/scripts/ganglia_server.py",
+ classname="GangliaServer",
+ command="stop",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmetad stop >> /tmp/gmetad.log 2>&1 ; /bin/ps auwx | /bin/grep [g]metad >> /tmp/gmetad.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertResourceCalled('MonitorWebserver', 'restart',
+ )
+ self.assertNoMoreResources()
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_client.py
new file mode 100644
index 0000000..327e88f
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_client.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHBaseClient(RMFTestCase):
+
+ def test_configure_secured(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_client.py",
+ classname = "HbaseClient",
+ command = "configure",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-RS',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_client_jaas.conf',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertNoMoreResources()
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_client.py",
+ classname = "HbaseClient",
+ command = "configure",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-RS',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertNoMoreResources()
+
+
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
new file mode 100644
index 0000000..69db529
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHBaseMaster(RMFTestCase):
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "configure",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "start",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
+ not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
+ user = 'hbase'
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "stop",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master && rm -f /var/run/hbase/hbase-hbase-master.pid',
+ not_if = None,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_decom_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "decommission",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host1',
+ logoutput = True,
+ user = 'hbase',
+ )
+ self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host2',
+ logoutput = True,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_configure_secured(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "configure",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ self.assertNoMoreResources()
+
+ def test_start_secured(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "start",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
+ not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_secured(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "stop",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master && rm -f /var/run/hbase/hbase-hbase-master.pid',
+ not_if = None,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_decom_secure(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "decommission",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase; /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host1',
+ logoutput = True,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def assert_configure_default(self):
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-MASTER',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/hadoop/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+
+ def assert_configure_secured(self):
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-MASTER',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_master_jaas.conf',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/hadoop/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_regionserver.py
new file mode 100644
index 0000000..a18c1fd
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_regionserver.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHbaseRegionServer(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_regionserver.py",
+ classname = "HbaseRegionServer",
+ command = "configure",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_regionserver.py",
+ classname = "HbaseRegionServer",
+ command = "start",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
+ not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
+ user = 'hbase'
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_regionserver.py",
+ classname = "HbaseRegionServer",
+ command = "stop",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver && rm -f /var/run/hbase/hbase-hbase-regionserver.pid',
+ not_if = None,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_configure_secured(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_regionserver.py",
+ classname = "HbaseRegionServer",
+ command = "configure",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ self.assertNoMoreResources()
+
+ def test_start_secured(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_regionserver.py",
+ classname = "HbaseRegionServer",
+ command = "start",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
+ not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_secured(self):
+ self.executeScript("1.3.2/services/HBASE/package/scripts/hbase_regionserver.py",
+ classname = "HbaseRegionServer",
+ command = "stop",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver && rm -f /var/run/hbase/hbase-hbase-regionserver.pid',
+ not_if = None,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def assert_configure_default(self):
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-RS',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/hadoop/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+
+
+ def assert_configure_secured(self):
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-RS',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_regionserver_jaas.conf',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/hadoop/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hcat_client.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hcat_client.py
new file mode 100644
index 0000000..6088548
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hcat_client.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHcatClient(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hcat_client.py",
+ classname = "HCatClient",
+ command = "configure",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+ owner = 'hcat',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('Directory', '/var/run/webhcat',
+ owner = 'hcat',
+ recursive = True,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hcatalog/conf/hcat-env.sh',
+ owner = 'hcat',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
+
+
+
+ def test_configure_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hcat_client.py",
+ classname = "HCatClient",
+ command = "configure",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+ owner = 'hcat',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('Directory', '/var/run/webhcat',
+ owner = 'hcat',
+ recursive = True,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hcatalog/conf/hcat-env.sh',
+ owner = 'hcat',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py
new file mode 100644
index 0000000..6355b9c
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveClient(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_client.py",
+ classname = "HiveClient",
+ command = "configure",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Directory', '/etc/hive/conf',
+ owner = 'hive',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 420,
+ conf_dir = '/etc/hive/conf',
+ configurations = self.getConfig()['configurations']['hive-site'],
+ )
+ self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+ not_if = '[ -f DBConnectionVerification.jar]',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
+ content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf"),
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
+
+
+
+ def test_configure_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_client.py",
+ classname = "HiveClient",
+ command = "configure",
+ config_file="secured.json"
+ )
+ self.assertResourceCalled('Directory', '/etc/hive/conf',
+ owner = 'hive',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 420,
+ conf_dir = '/etc/hive/conf',
+ configurations = self.getConfig()['configurations']['hive-site'],
+ )
+ self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+ not_if = '[ -f DBConnectionVerification.jar]',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
+ content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf"),
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
new file mode 100644
index 0000000..59084ab
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveMetastore(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_metastore.py",
+ classname = "HiveMetastore",
+ command = "configure",
+ config_file="default.json"
+ )
+ self.assert_configure_default()
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_metastore.py",
+ classname = "HiveMetastore",
+ command = "start",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server',
+ not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+ user = 'hive'
+ )
+
+ self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+ path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+ )
+
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_metastore.py",
+ classname = "HiveMetastore",
+ command = "stop",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid')
+ self.assertNoMoreResources()
+
+ def test_configure_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_metastore.py",
+ classname = "HiveMetastore",
+ command = "configure",
+ config_file="secured.json"
+ )
+ self.assert_configure_default()
+ self.assertNoMoreResources()
+
+ def test_start_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_metastore.py",
+ classname = "HiveMetastore",
+ command = "start",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server',
+ not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+ user = 'hive'
+ )
+
+ self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+ path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+ )
+
+ self.assertNoMoreResources()
+
+ def test_stop_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_metastore.py",
+ classname = "HiveMetastore",
+ command = "stop",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive.pid')
+ self.assertNoMoreResources()
+
+ def assert_configure_default(self):
+ self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', 'usr/bin/'],
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ )
+ self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+ owner = 'hive',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 384,
+ conf_dir = '/etc/hive/conf.server',
+ configurations = self.getConfig()['configurations']['hive-site'],
+ )
+ self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+ not_if = '[ -f DBConnectionVerification.jar]',
+ )
+ self.assertResourceCalled('File', '/tmp/start_metastore_script',
+ content = StaticFile('startMetastore.sh'),
+ mode = 493,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/lib/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+ content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+
+ def assert_configure_secured(self):
+ self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', 'usr/bin/'],
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ )
+ self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+ owner = 'hive',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 384,
+ conf_dir = '/etc/hive/conf.server',
+ configurations = self.getConfig()['configurations']['hive-site'],
+ )
+ self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+ not_if = '[ -f DBConnectionVerification.jar]',
+ )
+ self.assertResourceCalled('File', '/tmp/start_metastore_script',
+ content = StaticFile('startMetastore.sh'),
+ mode = 493,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/lib/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+ content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
new file mode 100644
index 0000000..b5d9a40
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHiveServer(RMFTestCase):
+ # Unit tests for the HDP 1.3.2 HiveServer2 lifecycle script
+ # (hive_server.py). Each test replays one Ambari command
+ # (configure / start / stop) against a canned cluster config
+ # (default.json = non-secure, secured.json = Kerberos) and asserts the
+ # exact sequence of resources the script declares, in order.
+
+ def test_configure_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_server.py",
+ classname = "HiveServer",
+ command = "configure",
+ config_file="default.json"
+ )
+ self.assert_configure_default()
+ # Nothing beyond the shared configure sequence is expected.
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_server.py",
+ classname = "HiveServer",
+ command = "start",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ # 'start' re-runs the configure sequence, then launches HiveServer2
+ # via the wrapper script; not_if skips the launch when the pid file
+ # already points at a live process.
+ self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server',
+ not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+ user = 'hive'
+ )
+
+ # After launch the script verifies metastore DB connectivity using
+ # the previously downloaded DBConnectionVerification jar.
+ self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+ path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+ )
+
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_server.py",
+ classname = "HiveServer",
+ command = "stop",
+ config_file="default.json"
+ )
+
+ # 'stop' declares no configure resources: just kill-by-pidfile.
+ self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid')
+ self.assertNoMoreResources()
+
+
+ def test_configure_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_server.py",
+ classname = "HiveServer",
+ command = "configure",
+ config_file="secured.json"
+ )
+ self.assert_configure_secured()
+ self.assertNoMoreResources()
+
+ def test_start_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_server.py",
+ classname = "HiveServer",
+ command = "start",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ # Same start/verify pair as the default case; the secured config
+ # differences live in the configure sequence, not the launch command.
+ self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server',
+ not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+ user = 'hive'
+ )
+
+ self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true hive asd com.mysql.jdbc.Driver',
+ path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin']
+ )
+
+ self.assertNoMoreResources()
+
+ def test_stop_secured(self):
+ self.executeScript("1.3.2/services/HIVE/package/scripts/hive_server.py",
+ classname = "HiveServer",
+ command = "stop",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid')
+ self.assertNoMoreResources()
+
+ def assert_configure_default(self):
+ # Shared expectations for the non-secure configure sequence.
+ # First: copy the MySQL JDBC driver into Hive's lib dir.
+ # NOTE(review): path = ['/bin', 'usr/bin/'] is missing the leading
+ # slash on 'usr/bin/'. The test mirrors the script under test --
+ # confirm whether the script itself should be fixed.
+ self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', 'usr/bin/'],
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ )
+ self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+ owner = 'hive',
+ group = 'hadoop',
+ recursive = True,
+ )
+ # mode 384 == 0600: hive-site.xml may carry metastore DB credentials.
+ self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 384,
+ conf_dir = '/etc/hive/conf.server',
+ configurations = self.getConfig()['configurations']['hive-site'],
+ )
+ # NOTE(review): '[ -f DBConnectionVerification.jar]' lacks a space
+ # before ']', which makes the shell test malformed, so this not_if
+ # guard presumably never succeeds and the jar is re-downloaded every
+ # run. Mirrors the script under test -- verify upstream.
+ self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+ not_if = '[ -f DBConnectionVerification.jar]',
+ )
+ # mode 493 == 0755: executable wrapper script and service directories.
+ self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+ content = StaticFile('startHiveserver2.sh'),
+ mode = 493,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/lib/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+ content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+
+ def assert_configure_secured(self):
+ # Secured configure sequence: currently byte-identical to the default
+ # sequence above (Kerberos-specific differences, if any, would show
+ # up in hive-site configurations loaded from secured.json).
+ self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+ creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+ path = ['/bin', 'usr/bin/'],
+ not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+ )
+ self.assertResourceCalled('Directory', '/etc/hive/conf.server',
+ owner = 'hive',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 384,
+ conf_dir = '/etc/hive/conf.server',
+ configurations = self.getConfig()['configurations']['hive-site'],
+ )
+ self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+ not_if = '[ -f DBConnectionVerification.jar]',
+ )
+ self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
+ content = StaticFile('startHiveserver2.sh'),
+ mode = 493,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/lib/hive',
+ owner = 'hive',
+ group = 'hadoop',
+ mode = 493,
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+ content = Template('hive-env.sh.j2', conf_dir="/etc/hive/conf.server"),
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+ owner = 'hive',
+ group = 'hadoop',
+ )
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
new file mode 100644
index 0000000..3b19451
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+import datetime
+import resource_management.libraries.functions
+@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
+class TestServiceCheck(RMFTestCase):
+ # Unit tests for the HDP 1.3.2 Hive service check (service_check.py).
+ # get_unique_id_and_date is patched to '' so the smoke-test table name
+ # ('hcatsmoke' + unique id) is deterministic across runs.
+
+ def test_service_check_default(self):
+
+ self.executeScript("1.3.2/services/HIVE/package/scripts/service_check.py",
+ classname="HiveServiceCheck",
+ command="service_check",
+ config_file="default.json"
+ )
+ # mode 493 == 0755: the smoke scripts must be executable.
+ self.assertResourceCalled('File', '/tmp/hiveserver2Smoke.sh',
+ content = StaticFile('hiveserver2Smoke.sh'),
+ mode = 493,
+ )
+ self.assertResourceCalled('File', '/tmp/hiveserver2.sql',
+ content = StaticFile('hiveserver2.sql'),
+ )
+ # NOTE(review): the JDBC URL embeds a stringified Python list
+ # ([u'c6402.ambari.apache.org']) instead of a bare hostname --
+ # presumably the script under test formats the host list directly;
+ # the assertion mirrors that behavior. Verify upstream.
+ self.assertResourceCalled('Execute', "env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hiveserver2Smoke.sh jdbc:hive2://[u'c6402.ambari.apache.org']:10000 /tmp/hiveserver2.sql",
+ logoutput = True,
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ tries = 3,
+ user = 'ambari-qa',
+ try_sleep = 5,
+ )
+ self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
+ content = StaticFile('hcatSmoke.sh'),
+ mode = 493,
+ )
+ # NOTE(review): '/usr/local/nin' looks like a typo for
+ # '/usr/local/bin' in the script under test; the test asserts the
+ # current behavior. Confirm and fix upstream if so.
+ self.assertResourceCalled('Execute', 'sh /tmp/hcatSmoke.sh hcatsmoke prepare',
+ logoutput = True,
+ path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+ tries = 3,
+ user = 'ambari-qa',
+ try_sleep = 5,
+ )
+ # prepare must have created the warehouse dir for the smoke table.
+ self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
+ logoutput = True,
+ user = 'hdfs',
+ conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertResourceCalled('Execute', 'sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
+ logoutput = True,
+ path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+ tries = 3,
+ user = 'ambari-qa',
+ try_sleep = 5,
+ )
+ self.assertNoMoreResources()
+
+ def test_service_check_secured(self):
+
+ self.executeScript("1.3.2/services/HIVE/package/scripts/service_check.py",
+ classname="HiveServiceCheck",
+ command="service_check",
+ config_file="secured.json"
+ )
+ self.assertResourceCalled('File', '/tmp/hiveserver2Smoke.sh',
+ content = StaticFile('hiveserver2Smoke.sh'),
+ mode = 493,
+ )
+ self.assertResourceCalled('File', '/tmp/hiveserver2.sql',
+ content = StaticFile('hiveserver2.sql'),
+ )
+ # Secured variant: each smoke command is prefixed with a kinit as the
+ # headless smoke user, and the JDBC URL carries the service principal.
+ self.assertResourceCalled('Execute', "/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hiveserver2Smoke.sh jdbc:hive2://[u'c6402.ambari.apache.org']:10000/\\;principal=/etc/security/keytabs/hive.service.keytab /tmp/hiveserver2.sql",
+ logoutput = True,
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ tries = 3,
+ user = 'ambari-qa',
+ try_sleep = 5,
+ )
+ self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
+ content = StaticFile('hcatSmoke.sh'),
+ mode = 493,
+ )
+ self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke prepare',
+ logoutput = True,
+ path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+ tries = 3,
+ user = 'ambari-qa',
+ try_sleep = 5,
+ )
+ self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
+ logoutput = True,
+ user = 'hdfs',
+ conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
+ logoutput = True,
+ path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+ tries = 3,
+ user = 'ambari-qa',
+ try_sleep = 5,
+ )
+ self.assertNoMoreResources()