Posted to commits@ambari.apache.org by ja...@apache.org on 2014/09/15 21:12:37 UTC

[4/6] AMBARI-7296. HCatalog and WebHCat should not be managed as separate services (they should be part of the Hive service) (jaimin)

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
deleted file mode 100644
index c56ae5f..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-import sys
-
-
-def webhcat():
-  import params
-
-  params.HdfsDirectory(params.webhcat_apps_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=0755
-  )
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(None, action="create")
-
-  Directory(params.templeton_pid_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.templeton_log_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.config_dir,
-            owner=params.webhcat_user,
-            group=params.user_group)
-
-  XmlConfig("webhcat-site.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['webhcat-site'],
-            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
-            owner=params.webhcat_user,
-            group=params.user_group,
-  )
-
-  File(format("{config_dir}/webhcat-env.sh"),
-       owner=params.webhcat_user,
-       group=params.user_group,
-       content=InlineTemplate(params.webhcat_env_sh_template)
-  )
-
-  if params.security_enabled:
-    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
-  else:
-    kinit_if_needed = ""
-
-  if kinit_if_needed:
-    Execute(kinit_if_needed,
-            user=params.webhcat_user,
-            path='/bin'
-    )
-
-  CopyFromLocal(params.hadoop_streeming_jars,
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )
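
The deleted script queues each HDFS directory with action="create_delayed" and then flushes the whole batch with a single params.HdfsDirectory(None, action="create") call. Below is a minimal, library-free sketch of that batch-then-flush idea; DelayedDirectoryBatch and its methods are illustrative stand-ins, not the real resource_management.HdfsDirectory API.

# Illustrative batch-then-flush sketch; DelayedDirectoryBatch is hypothetical.
class DelayedDirectoryBatch:
    """Queue directory specs, then apply them all in one pass."""

    def __init__(self):
        self._pending = []

    def create_delayed(self, path, owner=None, mode=None):
        # Record the request instead of touching HDFS immediately.
        self._pending.append({"path": path, "owner": owner, "mode": mode})

    def create(self):
        # Flush the whole queue at once, e.g. one `hadoop fs` invocation
        # instead of one JVM spin-up per directory.
        for spec in self._pending:
            print("mkdir -p %(path)s (owner=%(owner)s, mode=%(mode)s)" % spec)
        self._pending = []

batch = DelayedDirectoryBatch()
batch.create_delayed("/apps/webhcat", owner="hcat", mode="0755")
batch.create_delayed("/user/hcat", owner="hcat", mode="0755")
batch.create()  # mirrors params.HdfsDirectory(None, action="create")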

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py
deleted file mode 100644
index 2111fa4..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import sys
-from resource_management import *
-
-from webhcat import webhcat
-from webhcat_service import webhcat_service
-
-class WebHCatServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    webhcat()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    webhcat_service(action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    webhcat_service(action = 'stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.pid_file)
-
-if __name__ == "__main__":
-  WebHCatServer().execute()
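
The deleted server script follows the standard Ambari agent lifecycle contract: a Script subclass exposing install/configure/start/stop/status, with start re-running configure first ("FOR SECURITY"). A stripped-down sketch of that dispatch is below; the Script stand-in is hypothetical, the real base class lives in resource_management.

import sys

class Script(object):
    # Hypothetical stand-in for resource_management's Script base class.
    def execute(self):
        # The agent invokes the script with the command name as an argument,
        # e.g. `webhcat_server.py start`, and dispatches to that method.
        command = sys.argv[1] if len(sys.argv) > 1 else "status"
        getattr(self, command)(env=None)

class WebHCatServer(Script):
    def install(self, env):
        print("install packages")

    def configure(self, env):
        print("render configs")

    def start(self, env):
        self.configure(env)  # re-render configs first, as the real script does
        print("start daemon")

    def stop(self, env):
        print("stop daemon")

    def status(self, env):
        print("check pid file")

if __name__ == "__main__":
    WebHCatServer().execute()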

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py
deleted file mode 100644
index a92446d..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-def webhcat_service(action='start'):
-  import params
-
-  cmd = format('env HADOOP_HOME={hadoop_home} {webhcat_bin_dir}/webhcat_server.sh')
-
-  if action == 'start':
-    demon_cmd = format('{cmd} start')
-    no_op_test = format('ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1')
-    Execute(demon_cmd,
-            user=params.webhcat_user,
-            not_if=no_op_test
-    )
-  elif action == 'stop':
-    demon_cmd = format('{cmd} stop')
-    Execute(demon_cmd,
-            user=params.webhcat_user
-    )
-    Execute(format('rm -f {pid_file}'))
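
Note the idempotence guard in the deleted service script: the start command is wrapped in not_if=no_op_test, so the daemon is not started again when the pid file already names a live process. A small sketch of the same guard in plain Python; execute_unless is an illustrative helper, not a resource_management API.

import subprocess

def execute_unless(cmd, guard):
    # Run `cmd` only when `guard` exits non-zero, i.e. the service is down.
    if subprocess.call(guard, shell=True) == 0:
        return  # already running, nothing to do
    subprocess.check_call(cmd, shell=True)

pid_file = "/var/run/webhcat/webhcat.pid"
no_op_test = ("ls %s >/dev/null 2>&1 && ps `cat %s` >/dev/null 2>&1"
              % (pid_file, pid_file))
execute_unless("echo starting webhcat", no_op_test)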

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
index 218ba8a..82cbd79 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
@@ -26,10 +26,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
index 38280ae..77cf405 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
@@ -23,62 +23,78 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.13.0.2.1</version>
 
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>global</config-type>
-        <config-type>mapred-site</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
-      <version>0.12.0.2.1</version>
       <components>
         <component>
-          <name>HCAT</name>
-          <category>CLIENT</category>
-          <commandScript>
-            <script>scripts/hcat_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>hive-site.xml</fileName>
-              <dictionaryName>hive-site</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-env.sh</fileName>
-              <dictionaryName>hive-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-log4j.properties</fileName>
-              <dictionaryName>hive-log4j</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-exec-log4j.properties</fileName>
-              <dictionaryName>hive-exec-log4j</dictionaryName>
-            </configFile>
-          </configFiles>
+          <name>HIVE_SERVER</name>
+          <dependencies>
+            <dependency>
+              <name>TEZ/TEZ_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
         </component>
       </components>
+
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
             <package>
+              <name>hive</name>
+            </package>
+            <package>
               <name>hive-hcatalog</name>
             </package>
+            <package>
+              <name>hive-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
-    </service>
 
+      <configuration-dependencies>
+        <config-type>hive-site</config-type>
+        <config-type>hive-log4j</config-type>
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>global</config-type>
+        <config-type>mapred-site</config-type>
+      </configuration-dependencies>
+    </service>
   </services>
 </metainfo>
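
After this consolidation, HIVE is the only service left in the stack's metainfo.xml and it absorbs the former HCATALOG/WEBHCAT packages. A short verification sketch using the repo-relative path from this diff:

import xml.etree.ElementTree as ET

path = ("ambari-server/src/main/resources/stacks/HDP/"
        "2.1.GlusterFS/services/HIVE/metainfo.xml")
root = ET.parse(path).getroot()

services = [s.findtext("name") for s in root.iter("service")]
assert services == ["HIVE"]  # the separate HCATALOG entry is gone

packages = {p.findtext("name") for p in root.iter("package")}
assert {"hive", "hive-hcatalog", "hive-webhcat"} <= packages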

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index 0eca719..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,143 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>glusterfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>glusterfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>glusterfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The glusterfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.queue.name</name>
-    <value>default</value>
-    <description>MapReduce queue name where WebHCat map-only jobs will be submitted to. Can be used to avoid a deadlock where all map slots in the cluster are taken over by Templeton launcher tasks.</description>
-  </property>
-
-</configuration>
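
The deleted file is a standard Hadoop-style configuration document (name/value property elements). A tiny reader like the one below turns it into a dict, which is roughly how tools consume webhcat-site.xml; read_hadoop_config is an illustrative helper, not part of this patch.

import xml.etree.ElementTree as ET

def read_hadoop_config(path):
    root = ET.parse(path).getroot()
    return {prop.findtext("name"): prop.findtext("value")
            for prop in root.iter("property")}

conf = read_hadoop_config("webhcat-site.xml")
# With the defaults above, conf["templeton.port"] == "50111" and
# conf["templeton.pig.archive"] points at glusterfs:///apps/webhcat/pig.tar.gz.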

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index fc985f9..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.13.0.2.1</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
index b97a31a..a958e2e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
@@ -26,9 +26,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
index b9baa9c..0c45712 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
@@ -37,31 +37,63 @@
           </dependencies>
         </component>
       </components>
-
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>tez-site</config-type>
-        <config-type>hive-env</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
-      <version>0.12.0.2.1</version>
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
             <package>
+              <name>hive</name>
+            </package>
+            <package>
               <name>hive-hcatalog</name>
             </package>
+            <package>
+              <name>hive-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
-    </service>
 
+      <configuration-dependencies>
+        <config-type>hive-site</config-type>
+        <config-type>hive-log4j</config-type>
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>tez-site</config-type>
+        <config-type>hive-env</config-type>
+      </configuration-dependencies>
+    </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index 9b6c14e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,143 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.queue.name</name>
-    <value>default</value>
-    <description>MapReduce queue name where WebHCat map-only jobs will be submitted to. Can be used to avoid a deadlock where all map slots in the cluster are taken over by Templeton launcher tasks.</description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index ea621c6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.13.0.2.1</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
index 686d596..10b8e4e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
@@ -23,9 +23,5 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.14.0.2.2</version>
     </service>
-    <service>
-      <name>HCATALOG</name>
-      <version>0.14.0.2.2</version>
-    </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index a5222aa..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <version>0.14.0.2.2</version>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
new file mode 100644
index 0000000..ce9f2c5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<!-- The default settings for Templeton. -->
+<!-- Edit templeton-site.xml to change settings for your local -->
+<!-- install. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>templeton.hadoop.conf.dir</name>
+    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
+    <description>The path to the Hadoop configuration.</description>
+  </property>
+
+  <property>
+    <name>templeton.jar</name>
+    <value>/usr/hdp/2.9.9.9-117/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <description>The path to the Templeton jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>/usr/hdp/2.9.9.9-117/zookeeper/zookeeper.jar</value>
+    <description>Jars to add the the classpath.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hadoop</name>
+    <value>/usr/hdp/2.9.9.9-117/hadoop/bin/hadoop</value>
+    <description>The path to the Hadoop executable.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hcat</name>
+    <value>/usr/hdp/2.9.9.9-117/hive/bin/hcat</value>
+    <description>The path to the hcatalog executable.</description>
+  </property>
+
+
+</configuration>
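
The new HDP 2.2 defaults above hard-code a versioned /usr/hdp/<version>/ prefix instead of the flat /usr/lib paths used by the older stacks. A sketch of composing such paths from a stack-build version string; hdp_path is illustrative, not part of the patch.

HDP_ROOT = "/usr/hdp"

def hdp_path(version, *parts):
    # Join the stack root, the exact stack-build version, and the tail path.
    return "/".join((HDP_ROOT, version) + parts)

assert (hdp_path("2.9.9.9-117", "hive", "bin", "hcat")
        == "/usr/hdp/2.9.9.9-117/hive/bin/hcat")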

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
index 7548296..effe437 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
@@ -32,6 +32,18 @@
             <package>
               <name>mysql-connector-java</name>
             </package>
+            <package>
+              <name>hive_2_9_9_9_117-hcatalog</name>
+            </package>
+            <package>
+              <name>hive_2_9_9_9_117-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
           </packages>
         </osSpecific>
         <osSpecific>
@@ -60,22 +72,5 @@
         </osSpecific>
       </osSpecifics>
     </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
-      <version>0.14.0.2.9.9.9</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive_2_9_9_9_117-hcatalog</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-    </service>
-
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index ce9f2c5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/hdp/2.9.9.9-117/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/hdp/2.9.9.9-117/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/hdp/2.9.9.9-117/hadoop/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/hdp/2.9.9.9-117/hive/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
index dfb1b81..346202e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
@@ -18,35 +18,26 @@
 
 package org.apache.ambari.server.api.util;
 
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.*;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.*;
-
 import org.apache.ambari.server.state.stack.ConfigurationXml;
 import org.junit.Test;
 import org.xml.sax.SAXException;
 
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-
 import javax.xml.bind.JAXBException;
 import javax.xml.namespace.QName;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.xpath.XPathExpressionException;
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
 
 public class StackExtensionHelperTest {
 
@@ -305,7 +296,7 @@ public class StackExtensionHelperTest {
     helper.populateServicesForStack(stackInfo);
     helper.fillInfo();
     List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
-    assertEquals(13, allServices.size());
+    assertEquals(12, allServices.size());
     for (ServiceInfo serviceInfo : allServices) {
       if (serviceInfo.getName().equals("NAGIOS")) {
         assertTrue(serviceInfo.isMonitoringService());

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 784b565..8a3e270 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -18,69 +18,19 @@
 
 package org.apache.ambari.server.controller;
 
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Type;
-import java.net.ConnectException;
-import java.net.MalformedURLException;
-import java.net.UnknownHostException;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
 import junit.framework.Assert;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ClusterNotFoundException;
-import org.apache.ambari.server.DuplicateResourceException;
-import org.apache.ambari.server.HostNotFoundException;
-import org.apache.ambari.server.ObjectNotFoundException;
-import org.apache.ambari.server.ParentObjectNotFoundException;
-import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
-import org.apache.ambari.server.ServiceNotFoundException;
-import org.apache.ambari.server.StackAccessException;
-import org.apache.ambari.server.actionmanager.ActionDBAccessor;
-import org.apache.ambari.server.actionmanager.ActionType;
-import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
-import org.apache.ambari.server.actionmanager.HostRoleCommand;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.actionmanager.Request;
-import org.apache.ambari.server.actionmanager.Stage;
-import org.apache.ambari.server.actionmanager.TargetHostType;
+import org.apache.ambari.server.*;
+import org.apache.ambari.server.actionmanager.*;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.internal.ComponentResourceProviderTest;
-import org.apache.ambari.server.controller.internal.HostResourceProviderTest;
-import org.apache.ambari.server.controller.internal.RequestOperationLevel;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
-import org.apache.ambari.server.controller.internal.ServiceResourceProviderTest;
+import org.apache.ambari.server.controller.internal.*;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
@@ -93,52 +43,31 @@ import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.serveraction.ServerActionManager;
 import org.apache.ambari.server.serveraction.ServerActionManagerImpl;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigFactory;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
-import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.HostComponentAdminState;
-import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.MaintenanceState;
-import org.apache.ambari.server.state.RepositoryInfo;
-import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceComponent;
-import org.apache.ambari.server.state.ServiceComponentFactory;
-import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.server.state.ServiceComponentHostFactory;
-import org.apache.ambari.server.state.ServiceFactory;
-import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.StackInfo;
-import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStoppedEvent;
+import org.apache.ambari.server.state.svccomphost.*;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.collections.CollectionUtils;
 import org.easymock.Capture;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.*;
 import org.junit.rules.ExpectedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.persist.PersistService;
+import javax.persistence.EntityManager;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.net.ConnectException;
+import java.net.MalformedURLException;
+import java.net.UnknownHostException;
+import java.text.MessageFormat;
+import java.util.*;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
 
 public class AmbariManagementControllerTest {
 
@@ -7110,7 +7039,7 @@ public class AmbariManagementControllerTest {
   public void testGetStackServices() throws Exception {
     StackServiceRequest request = new StackServiceRequest(STACK_NAME, NEW_STACK_VERSION, null);
     Set<StackServiceResponse> responses = controller.getStackServices(Collections.singleton(request));
-    Assert.assertEquals(12, responses.size());
+    Assert.assertEquals(11, responses.size());
 
 
     StackServiceRequest requestWithParams = new StackServiceRequest(STACK_NAME, NEW_STACK_VERSION, SERVICE_NAME);

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
index ffe2ea0..be5aea8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
@@ -4,9 +4,6 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.StackServiceResponse;
 import org.apache.ambari.server.state.DependencyInfo;
 import org.easymock.EasyMockSupport;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.isA;
-
 import org.junit.Before;
 import org.junit.Test;
 
@@ -15,7 +12,9 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
 
-import static org.junit.Assert.*;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.isA;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -53,7 +52,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
@@ -92,7 +91,7 @@ public class BaseBlueprintProcessorTest {
                  5, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-                 "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+                 "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
                  "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
@@ -175,7 +174,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -212,7 +211,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
       "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
     assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
@@ -234,7 +233,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -271,7 +270,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
@@ -293,7 +292,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -330,7 +329,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
@@ -352,7 +351,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
@@ -389,7 +388,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index c7f5f1b..138b4b1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -18,93 +18,51 @@
 
 package org.apache.ambari.server.upgrade;
 
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertNull;
-import static junit.framework.Assert.assertTrue;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.isA;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-import javax.persistence.EntityTransaction;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Order;
-import javax.persistence.criteria.Path;
-import javax.persistence.criteria.Predicate;
-import javax.persistence.criteria.Root;
-import javax.persistence.metamodel.SingularAttribute;
-
+import com.google.inject.*;
+import com.google.inject.persist.PersistService;
+import com.google.inject.persist.Transactional;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.KeyValueDAO;
-import org.apache.ambari.server.orm.dao.PermissionDAO;
-import org.apache.ambari.server.orm.dao.PrincipalDAO;
-import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
-import org.apache.ambari.server.orm.dao.PrivilegeDAO;
-import org.apache.ambari.server.orm.dao.ResourceDAO;
-import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
-import org.apache.ambari.server.orm.dao.UserDAO;
-import org.apache.ambari.server.orm.dao.ViewDAO;
-import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
-import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
-import org.apache.ambari.server.orm.entities.KeyValueEntity;
-import org.apache.ambari.server.orm.entities.PrivilegeEntity;
-import org.apache.ambari.server.orm.entities.ResourceEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
-import org.apache.ambari.server.orm.entities.ViewEntity;
-import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
-import org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity;
-import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
+import org.apache.ambari.server.state.*;
 import org.easymock.Capture;
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityTransaction;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.*;
+import javax.persistence.metamodel.SingularAttribute;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+
+import static junit.framework.Assert.*;
+import static org.easymock.EasyMock.*;
 
 /**
  * UpgradeCatalog170 unit tests.
  */
 public class UpgradeCatalog170Test {
 
+  private Injector injector;
+  private final String CLUSTER_NAME = "c1";
+  private final String SERVICE_NAME = "HDFS";
+  private final String HOST_NAME = "h1";
+  private final String DESIRED_STACK_VERSION = "{\"stackName\":\"HDP\",\"stackVersion\":\"2.0.6\"}";
+
   Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
   EntityManager entityManager = createStrictMock(EntityManager.class);
 
@@ -113,8 +71,145 @@ public class UpgradeCatalog170Test {
     reset(entityManagerProvider);
     expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
     replay(entityManagerProvider);
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+  }
+
+  @After
+  public void tearDown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
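+  // Builds a minimal cluster entity, together with the admin resource that represents it, in the in-memory test database.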
+  private ClusterEntity createCluster() {
+    ResourceTypeDAO resourceTypeDAO = injector.getInstance(ResourceTypeDAO.class);
+
+    // create an admin resource to represent this cluster
+    ResourceTypeEntity resourceTypeEntity = resourceTypeDAO.findById(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE);
+    if (resourceTypeEntity == null) {
+      resourceTypeEntity = new ResourceTypeEntity();
+      resourceTypeEntity.setId(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE);
+      resourceTypeEntity.setName(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE_NAME);
+      resourceTypeEntity = resourceTypeDAO.merge(resourceTypeEntity);
+    }
+    ResourceEntity resourceEntity = new ResourceEntity();
+    resourceEntity.setResourceType(resourceTypeEntity);
+
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterEntity clusterEntity = new ClusterEntity();
+    clusterEntity.setClusterId(1L);
+    clusterEntity.setClusterName(CLUSTER_NAME);
+    clusterEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    clusterEntity.setResource(resourceEntity);
+    clusterDAO.create(clusterEntity);
+    return clusterEntity;
+  }
+
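+  // Persists a bare ClusterServiceEntity for the given service name on the test cluster.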
+  private ClusterServiceEntity createService(ClusterEntity clusterEntity, String serviceName) {
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ClusterServiceEntity clusterServiceEntity = new ClusterServiceEntity();
+    clusterServiceEntity.setClusterId(1L);
+    clusterServiceEntity.setClusterEntity(clusterEntity);
+    clusterServiceEntity.setServiceName(serviceName);
+    clusterServiceDAO.create(clusterServiceEntity);
+    return clusterServiceEntity;
   }
 
+  private ClusterServiceEntity addService(ClusterEntity clusterEntity, String serviceName) {
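+    // Creates the service, then attaches a desired-state record and merges the cluster.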
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+
+    ClusterServiceEntity clusterServiceEntity = createService(clusterEntity, serviceName);
+
+    ServiceDesiredStateEntity serviceDesiredStateEntity = new ServiceDesiredStateEntity();
+    serviceDesiredStateEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    serviceDesiredStateEntity.setClusterId(1L);
+    serviceDesiredStateEntity.setServiceName(serviceName);
+    serviceDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+
+    clusterServiceEntity.setServiceDesiredStateEntity(serviceDesiredStateEntity);
+    clusterEntity.getClusterServiceEntities().add(clusterServiceEntity);
+
+    clusterDAO.merge(clusterEntity);
+
+    return clusterServiceEntity;
+  }
+
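+  // Registers a single host and links it to the test cluster.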
+  private HostEntity createHost(ClusterEntity clusterEntity) {
+    HostDAO hostDAO = injector.getInstance(HostDAO.class);
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    HostEntity hostEntity = new HostEntity();
+    hostEntity.setHostName(HOST_NAME);
+    hostEntity.setClusterEntities(Collections.singletonList(clusterEntity));
+    hostDAO.create(hostEntity);
+    clusterEntity.getHostEntities().add(hostEntity);
+    clusterDAO.merge(clusterEntity);
+    return hostEntity;
+  }
+
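+  // Wires up the desired-state and live-state rows for one component on one host.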
+  @Transactional
+  private void addComponent(ClusterEntity clusterEntity, ClusterServiceEntity clusterServiceEntity, HostEntity hostEntity, String componentName) {
+    ServiceComponentDesiredStateEntity componentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+    componentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+    componentDesiredStateEntity.setComponentName(componentName);
+    componentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    componentDesiredStateEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    componentDesiredStateEntity.setClusterId(clusterServiceEntity.getClusterId());
+
+    HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
+    HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+    hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+    hostComponentDesiredStateEntity.setHostName(hostEntity.getHostName());
+    hostComponentDesiredStateEntity.setComponentName(componentName);
+    hostComponentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    hostComponentDesiredStateEntity.setAdminState(HostComponentAdminState.INSERVICE);
+    hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(componentDesiredStateEntity);
+    hostComponentDesiredStateEntity.setHostEntity(hostEntity);
+    hostComponentDesiredStateDAO.create(hostComponentDesiredStateEntity);
+
+    HostComponentStateEntity hostComponentStateEntity = new HostComponentStateEntity();
+    hostComponentStateEntity.setHostEntity(hostEntity);
+    hostComponentStateEntity.setHostName(hostEntity.getHostName());
+    hostComponentStateEntity.setComponentName(componentName);
+    hostComponentStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    hostComponentStateEntity.setClusterId(clusterEntity.getClusterId());
+    hostComponentStateEntity.setCurrentStackVersion(clusterEntity.getDesiredStackVersion());
+    hostComponentStateEntity.setServiceComponentDesiredStateEntity(componentDesiredStateEntity);
+
+    componentDesiredStateEntity.setHostComponentStateEntities(Collections.singletonList(hostComponentStateEntity));
+    componentDesiredStateEntity.setHostComponentDesiredStateEntities(Collections.singletonList(hostComponentDesiredStateEntity));
+
+    hostEntity.getHostComponentStateEntities().add(hostComponentStateEntity);
+    hostEntity.getHostComponentDesiredStateEntities().add(hostComponentDesiredStateEntity);
+
+    clusterServiceEntity.getServiceComponentDesiredStateEntities().add(componentDesiredStateEntity);
+
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+    HostDAO hostDAO = injector.getInstance(HostDAO.class);
+    serviceComponentDesiredStateDAO.merge(componentDesiredStateEntity);
+    hostDAO.merge(hostEntity);
+    clusterServiceDAO.merge(clusterServiceEntity);
+  }
+
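+  // Verifies that a cluster carrying separate HCATALOG and WEBHCAT services upgrades cleanly.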
+  @Test
+  public void testMoveHcatalogIntoHiveService() throws AmbariException {
+    final ClusterEntity clusterEntity = createCluster();
+    final ClusterServiceEntity clusterServiceEntityHDFS = addService(clusterEntity, "HDFS");
+    final ClusterServiceEntity clusterServiceEntityHIVE = addService(clusterEntity, "HIVE");
+    final ClusterServiceEntity clusterServiceEntityHCATALOG = addService(clusterEntity, "HCATALOG");
+    final ClusterServiceEntity clusterServiceEntityWEBHCAT = addService(clusterEntity, "WEBHCAT");
+    final HostEntity hostEntity = createHost(clusterEntity);
+    addComponent(clusterEntity, clusterServiceEntityHDFS, hostEntity, "NAMENODE");
+    addComponent(clusterEntity, clusterServiceEntityHIVE, hostEntity, "HIVE_SERVER");
+    addComponent(clusterEntity, clusterServiceEntityHCATALOG, hostEntity, "HCAT");
+    addComponent(clusterEntity, clusterServiceEntityWEBHCAT, hostEntity, "WEBHCAT_SERVER");
+    UpgradeCatalog170 upgradeCatalog170 = injector.getInstance(UpgradeCatalog170.class);
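+    // Passing means the move completes without throwing against the in-memory schema.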
+    upgradeCatalog170.moveHcatalogIntoHiveService();
+  }
+
   @Test
   public void testExecuteDDLUpdates() throws Exception {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
index 9443e16..449c950 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
@@ -59,6 +59,16 @@ class TestServiceCheck(RMFTestCase):
                         user = 'ambari-qa',
                         try_sleep = 5,
     )
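+    # WebHCat is now part of the Hive service, so its templeton smoke test runs in this service check.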
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()
 
   @patch("sys.exit")
@@ -96,4 +106,14 @@ class TestServiceCheck(RMFTestCase):
                         user = 'ambari-qa',
                         try_sleep = 5,
     )
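+    # Secured variant: the smoke script receives the smoke user's headless keytab and the kinit path.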
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()