Posted to commits@ambari.apache.org by jl...@apache.org on 2014/12/17 21:06:03 UTC

[31/37] ambari git commit: AMBARI-8746: Common Services: Refactor HDP 2.0.6 HIVE, OOZIE services (Jayush Luniya)

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
new file mode 100644
index 0000000..da0f60b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+HIVE_CONF_DIR=$4 hive --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
+echo $! > $3
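
For context, the one-liner above takes five positional arguments. A minimal sketch of how they line up (paths are hypothetical; the real command is assembled in hive_service.py later in this patch):

    # Sketch only: mirrors the argument order consumed by startMetastore.sh.
    import subprocess

    subprocess.call([
        "./startMetastore.sh",       # hypothetical location of the script
        "/var/log/hive/hive.out",    # $1 -> stdout redirect target
        "/var/log/hive/hive.log",    # $2 -> stderr redirect target
        "/var/run/hive/hive.pid",    # $3 -> file that receives the daemon's pid
        "/etc/hive/conf.server",     # $4 -> HIVE_CONF_DIR
        "/var/log/hive",             # $5 -> hive.log.dir
    ])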

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
new file mode 100644
index 0000000..22202ee
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+export ttonhost=$1
+export smoke_test_user=$2
+export smoke_user_keytab=$3
+export security_enabled=$4
+export kinit_path_local=$5
+export ttonurl="http://${ttonhost}:50111/templeton/v1"
+
+if [[ $security_enabled == "true" ]]; then
+  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
+else
+  kinitcmd=""
+fi
+
+export no_proxy=$ttonhost
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+exit 0 # NOTE: the ddl and pig checks below are unreachable while this early exit is in place
+
+#try hcat ddl command
+echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit  1
+fi
+
+# Pig smoke test is skipped in secure mode
+if [[ $security_enabled == "true" ]]; then
+  echo "Templeton Pig Smoke Tests not run in secure mode"
+  exit 0
+fi
+
+#try pig query
+outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
+ttonTestOutput="/tmp/idtest.${outname}.out";
+ttonTestInput="/tmp/idtest.${outname}.in";
+ttonTestScript="idtest.${outname}.pig"
+
+echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
+echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
+echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
+
+#copy pig script to hdfs
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+
+#copy input file to hdfs
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+
+#create, copy post args file
+echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
+
+#submit pig query
+cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
+retVal=`sudo su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+exit 0
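
Each check above extracts the HTTP status from curl's -w marker with sed; the same extraction in Python, for reference (the sample string is made up):

    import re

    # Output shape produced by: curl -s -w 'http_code <%{http_code}>' ...
    ret_val = "...response body... http_code <200>"
    match = re.search(r"http_code <([0-9]*)>", ret_val)
    http_code = match.group(1) if match else None
    assert http_code == "200"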

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/metainfo.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/metainfo.xml
new file mode 100644
index 0000000..26edb84
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/metainfo.xml
@@ -0,0 +1,294 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HIVE</name>
+      <displayName>Hive</displayName>
+      <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
+      <version>0.12.0.2.0</version>
+      <components>
+
+        <component>
+          <name>HIVE_METASTORE</name>
+          <displayName>Hive Metastore</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <clientsToUpdateConfigs></clientsToUpdateConfigs>
+          <auto-deploy>
+            <enabled>true</enabled>
+            <co-locate>HIVE/HIVE_SERVER</co-locate>
+          </auto-deploy>
+          <commandScript>
+            <script>scripts/hive_metastore.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <configuration-dependencies>
+            <config-type>hive-site</config-type>
+          </configuration-dependencies>
+        </component>
+
+        <component>
+          <name>HIVE_SERVER</name>
+          <displayName>HiveServer2</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <clientsToUpdateConfigs></clientsToUpdateConfigs>
+          <dependencies>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/HIVE_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/hive_server.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configuration-dependencies>
+            <config-type>hive-site</config-type>
+          </configuration-dependencies>
+        </component>
+        <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <clientsToUpdateConfigs>
+            <client>HCAT</client>
+          </clientsToUpdateConfigs>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>PIG/PIG</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <configuration-dependencies>
+            <config-type>hive-site</config-type>
+          </configuration-dependencies>
+        </component>
+        <component>
+          <name>MYSQL_SERVER</name>
+          <displayName>MySQL Server</displayName>
+          <category>MASTER</category>
+          <cardinality>0-1</cardinality>
+          <clientsToUpdateConfigs></clientsToUpdateConfigs>
+          <commandScript>
+            <script>scripts/mysql_server.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <customCommands>
+            <customCommand>
+              <name>CLEAN</name>
+              <commandScript>
+                <script>scripts/mysql_server.py</script>
+                <scriptType>PYTHON</scriptType>
+                <timeout>600</timeout>
+              </commandScript>
+            </customCommand>
+          </customCommands>
+        </component>
+
+        <component>
+          <name>HIVE_CLIENT</name>
+          <displayName>Hive Client</displayName>
+          <category>CLIENT</category>
+          <cardinality>1+</cardinality>
+          <commandScript>
+            <script>scripts/hive_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>xml</type>
+              <fileName>hive-site.xml</fileName>
+              <dictionaryName>hive-site</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-env.sh</fileName>
+              <dictionaryName>hive-env</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-log4j.properties</fileName>
+              <dictionaryName>hive-log4j</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-exec-log4j.properties</fileName>
+              <dictionaryName>hive-exec-log4j</dictionaryName>
+            </configFile>
+          </configFiles>
+          <configuration-dependencies>
+            <config-type>hive-site</config-type>
+          </configuration-dependencies>
+        </component>
+        <component>
+          <name>HCAT</name>
+          <displayName>HCat Client</displayName>
+          <category>CLIENT</category>
+          <commandScript>
+            <script>scripts/hcat_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>hcat-env.sh</fileName>
+              <dictionaryName>hcat-env</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
+        <configuration-dependencies>
+          <config-type>hive-site</config-type>
+        </configuration-dependencies>
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hive</name>
+            </package>
+            <package>
+              <name>hcatalog</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <requiredServices>
+        <service>ZOOKEEPER</service>
+        <service>YARN</service>
+        <service>TEZ</service>
+      </requiredServices>
+
+      <configuration-dependencies>
+        <config-type>hive-log4j</config-type>
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>hive-env</config-type>
+        <config-type>webhcat-site</config-type>
+        <config-type>webhcat-env</config-type>
+      </configuration-dependencies>
+    </service>
+  </services>
+</metainfo>
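
As a quick way to eyeball the component layout declared above, the file can be walked with the standard library (a sketch; assumes a local copy saved as metainfo.xml):

    import xml.etree.ElementTree as ET

    tree = ET.parse("metainfo.xml")  # hypothetical local copy of the file above
    for component in tree.getroot().iter("component"):
        print("%s %s %s" % (component.findtext("name"),
                            component.findtext("category"),
                            component.findtext("cardinality")))
    # e.g. HIVE_METASTORE MASTER 1, MYSQL_SERVER MASTER 0-1,
    #      HCAT CLIENT None (HCAT declares no cardinality)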

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/__init__.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/__init__.py
new file mode 100644
index 0000000..5561e10
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
new file mode 100644
index 0000000..31c1673
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+
+
+def hcat():
+  import params
+
+  Directory(params.hive_conf_dir,
+            recursive=True,
+            owner=params.hcat_user,
+            group=params.user_group,
+  )
+
+
+  Directory(params.hcat_conf_dir,
+            recursive=True,
+            owner=params.hcat_user,
+            group=params.user_group,
+  )
+
+  Directory(params.hcat_pid_dir,
+            owner=params.webhcat_user,
+            recursive=True
+  )
+
+  XmlConfig("hive-site.xml",
+            conf_dir=params.hive_client_conf_dir,
+            configurations=params.config['configurations']['hive-site'],
+            configuration_attributes=params.config['configuration_attributes']['hive-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0644)
+
+  File(format("{hcat_conf_dir}/hcat-env.sh"),
+       owner=params.hcat_user,
+       group=params.user_group,
+       content=InlineTemplate(params.hcat_env_sh_template)
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
new file mode 100644
index 0000000..8b5921a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from hcat import hcat
+
+class HCatClient(Script):
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+
+    env.set_params(params)
+
+    hcat()
+
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+
+if __name__ == "__main__":
+  HCatClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
new file mode 100644
index 0000000..fd4c6ca
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from resource_management.libraries.functions import get_unique_id_and_date
+
+def hcat_service_check():
+    import params
+    unique = get_unique_id_and_date()
+    output_file = format("/apps/hive/warehouse/hcatsmoke{unique}")
+    test_cmd = format("fs -test -e {output_file}")
+
+    if params.security_enabled:
+      kinit_cmd = format(
+        "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
+    else:
+      kinit_cmd = ""
+
+    File(format("{tmp_dir}/hcatSmoke.sh"),
+         content=StaticFile("hcatSmoke.sh"),
+         mode=0755
+    )
+
+    prepare_cmd = format("{kinit_cmd}env JAVA_HOME={java64_home} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} prepare")
+
+    Execute(prepare_cmd,
+            tries=3,
+            user=params.smokeuser,
+            try_sleep=5,
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
+            logoutput=True)
+
+    if params.security_enabled:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab,
+                    principal=params.hdfs_principal_name,
+                    bin_dir=params.execute_path
+      )
+    else:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab,
+                    bin_dir=params.execute_path
+      )
+
+    cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
+
+    Execute(cleanup_cmd,
+            tries=3,
+            user=params.smokeuser,
+            try_sleep=5,
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
+            logoutput=True
+    )
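
The two ExecuteHadoop branches above differ only in whether the principal keyword is passed; a single call with a conditional kwargs dict would express the same thing (a refactoring sketch reusing the names from the module above, not what this commit does):

    kwargs = dict(user=params.hdfs_user,
                  logoutput=True,
                  conf_dir=params.hadoop_conf_dir,
                  security_enabled=params.security_enabled,
                  kinit_path_local=params.kinit_path_local,
                  keytab=params.hdfs_user_keytab,
                  bin_dir=params.execute_path)
    if params.security_enabled:
        kwargs["principal"] = params.hdfs_principal_name
    ExecuteHadoop(test_cmd, **kwargs)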

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
new file mode 100644
index 0000000..2dbdd6f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -0,0 +1,232 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+import os
+
+
+def hive(name=None):
+  import params
+
+  if name == 'hiveserver2':
+
+    params.HdfsDirectory(params.hive_apps_whs_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=0777
+    )
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=params.hive_hdfs_user_mode
+    )
+    params.HdfsDirectory(None, action="create")
+
+  Directory(params.hive_conf_dir_prefix,
+            mode=0755
+  )
+
+  # We should change configurations for client as well as for server.
+  # The reason is that stale-configs are service-level, not component.
+  for conf_dir in params.hive_conf_dirs_list:
+    fill_conf_dir(conf_dir)
+
+  XmlConfig("hive-site.xml",
+            conf_dir=params.hive_config_dir,
+            configurations=params.config['configurations']['hive-site'],
+            configuration_attributes=params.config['configuration_attributes']['hive-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0644)
+  
+  if params.hive_specific_configs_supported and name == 'hiveserver2':
+    XmlConfig("hiveserver2-site.xml",
+              conf_dir=params.hive_server_conf_dir,
+              configurations=params.config['configurations']['hiveserver2-site'],
+              configuration_attributes=params.config['configuration_attributes']['hiveserver2-site'],
+              owner=params.hive_user,
+              group=params.user_group,
+              mode=0644)
+  
+  File(format("{hive_config_dir}/hive-env.sh"),
+       owner=params.hive_user,
+       group=params.user_group,
+       content=InlineTemplate(params.hive_env_sh_template)
+  )
+
+  if name == 'metastore' or name == 'hiveserver2':
+    jdbc_connector()
+    
+  environment = {
+    "no_proxy": format("{ambari_server_hostname}")
+  }
+
+  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" "
+               "--retry 5 "
+               "{jdk_location}{check_db_connection_jar_name} "
+               "-o {check_db_connection_jar_name}'")
+
+  Execute(cmd,
+          not_if=format("[ -f {check_db_connection_jar} ]"),
+          environment = environment)
+
+  if name == 'metastore':
+    File(params.start_metastore_path,
+         mode=0755,
+         content=StaticFile('startMetastore.sh')
+    )
+    if params.init_metastore_schema:
+      create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
+                                 "{hive_bin}/schematool -initSchema "
+                                 "-dbType {hive_metastore_db_type} "
+                                 "-userName {hive_metastore_user_name} "
+                                 "-passWord {hive_metastore_user_passwd!p}")
+
+      check_schema_created_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
+                                        "{hive_bin}/schematool -info "
+                                        "-dbType {hive_metastore_db_type} "
+                                        "-userName {hive_metastore_user_name} "
+                                        "-passWord {hive_metastore_user_passwd!p}")
+
+      Execute(create_schema_cmd,
+              not_if = check_schema_created_cmd
+      )
+  elif name == 'hiveserver2':
+    File(params.start_hiveserver2_path,
+         mode=0755,
+         content=Template(format('{start_hiveserver2_script}'))
+    )
+
+  if name != "client":
+    crt_directory(params.hive_pid_dir)
+    crt_directory(params.hive_log_dir)
+    crt_directory(params.hive_var_lib)
+
+def fill_conf_dir(component_conf_dir):
+  import params
+  
+  Directory(component_conf_dir,
+            owner=params.hive_user,
+            group=params.user_group,
+            recursive=True
+  )
+
+  XmlConfig("mapred-site.xml",
+            conf_dir=component_conf_dir,
+            configurations=params.config['configurations']['mapred-site'],
+            configuration_attributes=params.config['configuration_attributes']['mapred-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0644)
+
+
+  crt_file(format("{component_conf_dir}/hive-default.xml.template"))
+  crt_file(format("{component_conf_dir}/hive-env.sh.template"))
+
+  log4j_exec_filename = 'hive-exec-log4j.properties'
+  if (params.log4j_exec_props != None):
+    File(format("{component_conf_dir}/{log4j_exec_filename}"),
+         mode=0644,
+         group=params.user_group,
+         owner=params.hive_user,
+         content=params.log4j_exec_props
+    )
+  elif (os.path.exists("{component_conf_dir}/{log4j_exec_filename}.template")):
+    File(format("{component_conf_dir}/{log4j_exec_filename}"),
+         mode=0644,
+         group=params.user_group,
+         owner=params.hive_user,
+         content=StaticFile(format("{component_conf_dir}/{log4j_exec_filename}.template"))
+    )
+
+  log4j_filename = 'hive-log4j.properties'
+  if (params.log4j_props != None):
+    File(format("{component_conf_dir}/{log4j_filename}"),
+         mode=0644,
+         group=params.user_group,
+         owner=params.hive_user,
+         content=params.log4j_props
+    )
+  elif (os.path.exists("{component_conf_dir}/{log4j_filename}.template")):
+    File(format("{component_conf_dir}/{log4j_filename}"),
+         mode=0644,
+         group=params.user_group,
+         owner=params.hive_user,
+         content=StaticFile(format("{component_conf_dir}/{log4j_filename}.template"))
+    )
+
+
+def crt_directory(name):
+  import params
+
+  Directory(name,
+            recursive=True,
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
+
+
+def crt_file(name):
+  import params
+
+  File(name,
+       owner=params.hive_user,
+       group=params.user_group
+  )
+
+
+def jdbc_connector():
+  import params
+
+  if params.hive_jdbc_driver == "com.mysql.jdbc.Driver":
+    File(params.target,
+         action="delete",
+    )
+    Execute(('cp', format('/usr/share/java/{jdbc_jar_name}'), params.target),
+            not_if=format("test -f {target}"),
+            creates=params.target,
+            path=["/bin", "/usr/bin/"],
+            sudo=True
+    )
+  elif params.hive_jdbc_driver == "org.postgresql.Driver":
+    cmd = format("mkdir -p {artifact_dir} ; rm -f {target} ; cp /usr/share/java/{jdbc_jar_name} {target}")
+
+    Execute(cmd,
+            not_if=format("test -f {target}"),
+            creates=params.target,
+            environment= {'PATH' : params.execute_path },
+            path=["/bin", "usr/bin/"])
+
+  elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+    environment = {
+      "no_proxy": format("{ambari_server_hostname}")
+    }
+
+    cmd = format(
+      "mkdir -p {artifact_dir} ; "
+      "curl -kf -x \"\" --retry 10 {driver_curl_source} -o {driver_curl_target} &&  "
+      "rm -f {target} ; "
+      "cp {driver_curl_target} {target}")
+
+    Execute(cmd,
+            not_if=format("test -f {target}"),
+            path=["/bin", "/usr/bin/"],
+            environment=environment)
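
Note the "!p" conversion in the schematool commands above ({hive_metastore_user_passwd!p}): in resource_management's format() it marks the value as a password, so logged output shows a masked placeholder while the executed command keeps the real value. A toy illustration of that idea (not the library's implementation):

    def render(template, values, secrets=()):
        # Returns the real command plus a log-safe copy with secrets masked.
        real = template.format(**values)
        logged = real
        for key in secrets:
            logged = logged.replace(values[key], "[PROTECTED]")
        return real, logged

    real, logged = render("schematool -userName {user} -passWord {passwd}",
                          {"user": "hive", "passwd": "s3cret"},
                          secrets=("passwd",))
    # logged == "schematool -userName hive -passWord [PROTECTED]"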

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
new file mode 100644
index 0000000..499f632
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import sys
+from resource_management import *
+
+from hive import hive
+
+class HiveClient(Script):
+  def install(self, env):
+    import params
+    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='client')
+
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+if __name__ == "__main__":
+  HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
new file mode 100644
index 0000000..dc02a7d
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from hive import hive
+from hive_service import hive_service
+from mysql_service import mysql_service
+
+class HiveMetastore(Script):
+
+  def install(self, env):
+    import params
+    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='metastore')
+
+  def start(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    hive_service( 'metastore',
+                   action = 'start'
+    )
+
+  def stop(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+
+    hive_service( 'metastore',
+                   action = 'stop'
+    )
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+    # Verify the Hive Metastore process referenced by the pid file is running
+    check_process_status(pid_file)
+
+if __name__ == "__main__":
+  HiveMetastore().execute()
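
status() relies on check_process_status, which boils down to reading the pid file and probing the process. Roughly equivalent logic, for reference (a simplified sketch, not the library's exact code):

    import os

    class ComponentIsNotRunning(Exception):
        pass

    def check_process_status_sketch(pid_file):
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 only tests existence; nothing is delivered
        except (IOError, ValueError, OSError):
            raise ComponentIsNotRunning()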

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
new file mode 100644
index 0000000..fa8ece4
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from hive import hive
+from hive_service import hive_service
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+from install_jars import install_tez_jars
+
+class HiveServer(Script):
+
+  def install(self, env):
+    import params
+    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >=0):
+      install_tez_jars()
+    hive(name='hiveserver2')
+
+  def start(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+
+    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
+    copy_tarballs_to_hdfs('mapreduce', params.tez_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('tez', params.tez_user, params.hdfs_user, params.user_group)
+
+    hive_service( 'hiveserver2',
+                  action = 'start'
+    )
+
+  def stop(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+
+    hive_service( 'hiveserver2',
+                  action = 'stop'
+    )
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    pid_file = format("{hive_pid_dir}/{hive_pid}")
+    # Verify the HiveServer2 process referenced by the pid file is running
+    check_process_status(pid_file)
+
+
+if __name__ == "__main__":
+  HiveServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
new file mode 100644
index 0000000..8e5d878
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+import time
+from resource_management.core import shell
+
+def hive_service(
+    name,
+    action='start'):
+
+  import params
+
+  if name == 'metastore':
+    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+    cmd = format(
+      "env HADOOP_HOME={hadoop_home} JAVA_HOME={java64_home} {start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
+  elif name == 'hiveserver2':
+    pid_file = format("{hive_pid_dir}/{hive_pid}")
+    cmd = format(
+      "env JAVA_HOME={java64_home} {start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
+
+  process_id_exists = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
+  
+  if action == 'start':
+    if name == 'hiveserver2':
+      check_fs_root()
+
+    demon_cmd = format("{cmd}")
+    
+    Execute(demon_cmd,
+            user=params.hive_user,
+            environment={'HADOOP_HOME': params.hadoop_home},
+            path=params.execute_path,
+            not_if=process_id_exists
+    )
+
+    if params.hive_jdbc_driver == "com.mysql.jdbc.Driver" or \
+       params.hive_jdbc_driver == "org.postgresql.Driver" or \
+       params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+      
+      db_connection_check_command = format(
+        "{java64_home}/bin/java -cp {check_db_connection_jar}:/usr/share/java/{jdbc_jar_name} org.apache.ambari.server.DBConnectionVerification '{hive_jdbc_connection_url}' {hive_metastore_user_name} {hive_metastore_user_passwd!p} {hive_jdbc_driver}")
+      
+      Execute(db_connection_check_command,
+              path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin', tries=5, try_sleep=10)
+      
+    # AMBARI-5800 - wait for the server to come up instead of just the PID existence
+    if name == 'hiveserver2':
+      SOCKET_WAIT_SECONDS = 120
+      address=params.hive_server_host
+      port=int(params.hive_server_port)
+      
+      start_time = time.time()
+      end_time = start_time + SOCKET_WAIT_SECONDS
+
+      is_service_socket_valid = False
+      print "Waiting for the Hive server to start..."
+      if params.security_enabled:
+        kinitcmd=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
+      else:
+        kinitcmd=None
+      while time.time() < end_time:
+        try:
+          check_thrift_port_sasl(address, port, params.hive_server2_authentication,
+                                 params.hive_server_principal, kinitcmd, params.smokeuser)
+          is_service_socket_valid = True
+          break
+        except Exception, e:
+          time.sleep(5)
+
+      elapsed_time = time.time() - start_time
+      
+      if not is_service_socket_valid:
+        raise Fail("Connection to Hive server %s on port %s failed after %d seconds" % (address, port, elapsed_time))
+      
+      print "Successfully connected to Hive at %s on port %s after %d seconds" % (address, port, elapsed_time)    
+            
+  elif action == 'stop':
+    demon_cmd = format("sudo kill `cat {pid_file}`")
+    Execute(demon_cmd,
+         not_if = format("! ({process_id_exists})")
+    )
+    File(pid_file,
+         action = "delete",
+    )
+
+def check_fs_root():
+  import params  
+  fs_root_url = format("{fs_root}{hive_apps_whs_dir}")
+  cmd = format("metatool -listFSRoot 2>/dev/null | grep hdfs:// | grep -v '.db$'")
+  code, out = shell.call(cmd, user=params.hive_user)
+  if code == 0 and fs_root_url.strip() != out.strip():
+    cmd = format("metatool -updateLocation {fs_root}{hive_apps_whs_dir} {out}")
+    Execute(cmd,
+            environment= {'PATH' : params.execute_path },
+            user=params.hive_user)
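
Stripped of the SASL-specific thrift check, the AMBARI-5800 wait loop above is a standard wait-for-port pattern; for reference (hypothetical host and port):

    import socket
    import time

    def wait_for_port(host, port, timeout_seconds=120, interval=5):
        # Returns True once a TCP connect succeeds, False after the timeout.
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            try:
                socket.create_connection((host, port), timeout=interval).close()
                return True
            except socket.error:
                time.sleep(interval)
        return False

    # e.g. wait_for_port("hiveserver2.example.com", 10000)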

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
new file mode 100644
index 0000000..a18ca72
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import os
+
+def install_tez_jars():
+  import params
+
+  destination_hdfs_dirs = get_tez_hdfs_dir_paths(params.tez_lib_uris)
+
+  # If tez libraries are to be stored in hdfs
+  if destination_hdfs_dirs:
+    for hdfs_dir in destination_hdfs_dirs:
+      params.HdfsDirectory(hdfs_dir,
+                           action="create_delayed",
+                           owner=params.tez_user,
+                           mode=0755
+      )
+    pass
+    params.HdfsDirectory(None, action="create")
+
+    if params.security_enabled:
+      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+    else:
+      kinit_if_needed = ""
+
+    if kinit_if_needed:
+      Execute(kinit_if_needed,
+              user=params.tez_user,
+              path='/bin'
+      )
+    pass
+
+    app_dir_path = None
+    lib_dir_path = None
+
+    if len(destination_hdfs_dirs) > 0:
+      for path in destination_hdfs_dirs:
+        if 'lib' in path:
+          lib_dir_path = path
+        else:
+          app_dir_path = path
+        pass
+      pass
+    pass
+
+    if app_dir_path:
+      for scr_file, dest_file in params.app_dir_files.iteritems():
+        CopyFromLocal(scr_file,
+                      mode=0755,
+                      owner=params.tez_user,
+                      dest_dir=app_dir_path,
+                      dest_file=dest_file,
+                      kinnit_if_needed=kinit_if_needed,
+                      hdfs_user=params.hdfs_user,
+                      hadoop_bin_dir=params.hadoop_bin_dir,
+                      hadoop_conf_dir=params.hadoop_conf_dir
+        )
+
+    if lib_dir_path:
+      CopyFromLocal(params.tez_local_lib_jars,
+                    mode=0755,
+                    owner=params.tez_user,
+                    dest_dir=lib_dir_path,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+    pass
+
+
+def get_tez_hdfs_dir_paths(tez_lib_uris = None):
+  hdfs_path_prefix = 'hdfs://'
+  lib_dir_paths = []
+  if tez_lib_uris and tez_lib_uris.strip().find(hdfs_path_prefix, 0) != -1:
+    dir_paths = tez_lib_uris.split(',')
+    for path in dir_paths:
+      if not "tez.tar.gz" in path:
+        lib_dir_path = path.replace(hdfs_path_prefix, '')
+        lib_dir_path = lib_dir_path if lib_dir_path.endswith(os.sep) else lib_dir_path + os.sep
+        lib_dir_paths.append(lib_dir_path)
+      else:
+        lib_dir_path = path.replace(hdfs_path_prefix, '')
+        lib_dir_paths.append(os.path.dirname(lib_dir_path))
+    pass
+  pass
+
+  return lib_dir_paths
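
For example, get_tez_hdfs_dir_paths behaves as follows on typical tez.lib.uris values (URIs are hypothetical; assumes a POSIX os.sep):

    get_tez_hdfs_dir_paths("hdfs:///apps/tez/,hdfs:///apps/tez/lib/")
    # -> ['/apps/tez/', '/apps/tez/lib/']

    get_tez_hdfs_dir_paths("hdfs:///apps/tez/lib/tez.tar.gz")
    # -> ['/apps/tez/lib']  (directory holding the tarball)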

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_server.py
new file mode 100644
index 0000000..91a699b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_server.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from mysql_service import mysql_service
+
+
+class MysqlServer(Script):
+  def install(self, env):
+    import params
+    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
+    self.configure(env)
+
+  def clean(self, env):
+    import params, mysql_users
+    env.set_params(params)
+    mysql_users.mysql_deluser(params)
+
+  def configure(self, env):
+    import params, mysql_users
+    env.set_params(params)
+    mysql_users.mysql_adduser(params)
+
+  def start(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    mysql_service(daemon_name=params.daemon_name, action='start')
+
+  def stop(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    mysql_service(daemon_name=params.daemon_name, action='stop')
+
+  def status(self, env):
+    import status_params
+
+    mysql_service(daemon_name=status_params.daemon_name, action='status')
+
+
+if __name__ == "__main__":
+  MysqlServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_service.py
new file mode 100644
index 0000000..2f0c6f6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_service.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+
+def mysql_service(daemon_name=None, action='start'): 
+  status_cmd = format('service {daemon_name} status | grep running')
+  cmd = ('service', daemon_name, action)
+
+  if action == 'status':
+    Execute(status_cmd)
+  elif action == 'stop':
+    import params
+    Execute(cmd,
+            logoutput = True,
+            only_if = status_cmd,
+            sudo = True,
+    )
+  elif action == 'start':
+    import params
+    # required for running hive
+    replace_bind_address = ('sed','-i','s|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|',params.mysql_configname)
+    Execute(replace_bind_address,
+            sudo = True,
+    )
+    
+    Execute(cmd,
+      logoutput = True,
+      not_if = status_cmd,
+      sudo = True,
+    )
+
+
+
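
The sed invocation above rewrites the bind-address line of the MySQL config file (typically my.cnf) in place so MySQL listens on all interfaces; its effect on a sample line, expressed in Python:

    import re

    line = "bind-address\t= 127.0.0.1"
    print(re.sub(r"^bind-address[ \t]*=.*", "bind-address = 0.0.0.0", line))
    # -> bind-address = 0.0.0.0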

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_users.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_users.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_users.py
new file mode 100644
index 0000000..9961c18
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/mysql_users.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+# Creates the Hive metastore DB user and grants it access from the Hive Server and Metastore hosts
+def mysql_adduser(params):
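+  # addMysqlUser.sh ships in the package's files/ directory; StaticFile stages it into tmp_dir.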
+  File(params.mysql_adduser_path,
+       mode=0755,
+       content=StaticFile('addMysqlUser.sh')
+  )
+  hive_server_host = params.hive_server_host
+  hive_metastore_host = params.hive_metastore_host
+
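+  # The "!p" qualifier makes format() treat the value as a password, masking it in logged commands.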
+  add_metastore_cmd = "bash -x {mysql_adduser_path} {daemon_name} {hive_metastore_user_name} {hive_metastore_user_passwd!p} {hive_metastore_host}"
+  add_hiveserver_cmd = "bash -x {mysql_adduser_path} {daemon_name} {hive_metastore_user_name} {hive_metastore_user_passwd!p} {hive_server_host}"
+  if hive_server_host == hive_metastore_host:
+    cmd = format(add_hiveserver_cmd)
+  else:
+    cmd = format(add_hiveserver_cmd + ";" + add_metastore_cmd)
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
+  )
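+  # For example, with both roles on one host the rendered command is roughly:
+  #   bash -x <tmp_dir>/addMysqlUser.sh mysqld hive [PROTECTED] <hive_server_host>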
+
+# Drops the Hive metastore DB user's access from the Hive Server and Metastore hosts
+def mysql_deluser(params):
+  File(params.mysql_deluser_path,
+       mode=0755,
+       content=StaticFile('removeMysqlUser.sh')
+  )
+  hive_server_host = params.hive_server_host
+  hive_metastore_host = params.hive_metastore_host
+
+  del_hiveserver_cmd = "bash -x {mysql_deluser_path} {daemon_name} {hive_metastore_user_name} {hive_server_host}"
+  del_metastore_cmd = "bash -x {mysql_deluser_path} {daemon_name} {hive_metastore_user_name} {hive_metastore_host}"
+  if hive_server_host == hive_metastore_host:
+    cmd = format(del_hiveserver_cmd)
+  else:
+    cmd = format(del_hiveserver_cmd + ";" + del_metastore_cmd)
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
+  )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
new file mode 100644
index 0000000..bb7f1f4
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management import *
+import status_params
+import os
+
+# server configurations
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+# This is expected to be of the form #.#.#.#
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
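+# True only for HDP 2.1.x releases; used to guard 2.1-specific behavior.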
+stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
+
+# Hadoop params
+# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_home = '/usr/hdp/current/hadoop-client'
+  hive_bin = '/usr/hdp/current/hive-client/bin'
+  hive_lib = '/usr/hdp/current/hive-client/lib'
+
+  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
+  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
+
+  hive_specific_configs_supported = True
+else:
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  hadoop_streaming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
+  hive_bin = '/usr/lib/hive/bin'
+  hive_lib = '/usr/lib/hive/lib/'
+  pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
+  hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
+  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
+
+  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+  # for newer versions
+  else:
+    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+
+  hive_specific_configs_supported = False
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hive_conf_dir_prefix = "/etc/hive"
+hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
+hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
+hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
+
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+  hcat_conf_dir = '/etc/hcatalog/conf'
+  config_dir = '/etc/hcatalog/conf'
+# for newer versions
+else:
+  hcat_conf_dir = '/etc/hive-hcatalog/conf'
+  config_dir = '/etc/hive-webhcat/conf'
+
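+# PATH handed to Execute calls so hive and hadoop binaries resolve regardless of stack layout.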
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
+hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
+hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
+
+hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
+hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
+
+#users
+hive_user = config['configurations']['hive-env']['hive_user']
+#JDBC driver jar name
+hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
+if hive_jdbc_driver == "com.mysql.jdbc.Driver":
+  jdbc_jar_name = "mysql-connector-java.jar"
+  jdbc_symlink_name = "mysql-jdbc-driver.jar"
+elif hive_jdbc_driver == "org.postgresql.Driver":
+  jdbc_jar_name = "postgresql-jdbc.jar"
+  jdbc_symlink_name = "postgres-jdbc-driver.jar"
+elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+  jdbc_jar_name = "ojdbc6.jar"
+  jdbc_symlink_name = "oracle-jdbc-driver.jar"
+
+check_db_connection_jar_name = "DBConnectionVerification.jar"
+check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
+
+#common
+hive_metastore_host = config['clusterHostInfo']['hive_metastore_host'][0]
+hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris'])  # typically 9083
+hive_var_lib = '/var/lib/hive'
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
+hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
+hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
+hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
+hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
+
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
+smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+
+fs_root = config['configurations']['core-site']['fs.defaultFS']
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+
+#hive_env
+hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
+hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
+hive_pid_dir = status_params.hive_pid_dir
+hive_pid = status_params.hive_pid
+# conf dirs managed on this host: server conf plus the default client conf
+hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir]
+
+if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
+  hive_config_dir = hive_server_conf_dir
+else:
+  hive_config_dir = hive_client_conf_dir
+
+#hive-site
+hive_database_name = config['configurations']['hive-env']['hive_database_name']
+
+#Starting hiveserver2
+start_hiveserver2_script = 'startHiveserver2.sh.j2'
+
+#Starting metastore
+start_metastore_script = 'startMetastore.sh'
+hive_metastore_pid = status_params.hive_metastore_pid
+java_share_dir = '/usr/share/java'
+driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+
+target = format("{hive_lib}/{jdbc_jar_name}")
+
+jdk_location = config['hostLevelParams']['jdk_location']
+driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
+
+start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
+start_metastore_path = format("{tmp_dir}/start_metastore_script")
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
+java64_home = config['hostLevelParams']['java_home']
+
+##### MYSQL
+
+db_name = config['configurations']['hive-env']['hive_database_name']
+mysql_group = 'mysql'
+mysql_host = config['clusterHostInfo']['hive_mysql_host']
+
+mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
+mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
+
+######## Metastore Schema
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+  init_metastore_schema = False
+else:
+  init_metastore_schema = True
+
+########## HCAT
+
+hcat_dbroot = hcat_lib
+
+hcat_user = config['configurations']['hive-env']['hcat_user']
+webhcat_user = config['configurations']['hive-env']['webhcat_user']
+
+hcat_pid_dir = status_params.hcat_pid_dir
+hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+hcat_env_sh_template = config['configurations']['hcat-env']['content']
+
+#hive-log4j.properties.template
+if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
+  log4j_props = config['configurations']['hive-log4j']['content']
+else:
+  log4j_props = None
+
+#hive-exec-log4j.properties.template
+if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
+  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
+else:
+  log4j_exec_props = None
+
+daemon_name = status_params.daemon_name
+hive_env_sh_template = config['configurations']['hive-env']['content']
+
+hive_hdfs_user_dir = format("/user/{hive_user}")
+hive_hdfs_user_mode = 0700
+hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
+#for create_hdfs_directory
+hostname = config["hostname"]
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+
+# Tez-related properties
+tez_user = config['configurations']['tez-env']['tez_user']
+
+# Tez jars
+tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars: None}
+
+# Tez libraries
+tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
+
+if System.get_instance().os_family == "ubuntu":
+  mysql_configname = '/etc/mysql/my.cnf'
+else:
+  mysql_configname = '/etc/my.cnf'
+
+mysql_user = 'mysql'
+
+# Hive security
+hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
+
+mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
+
+# Other packages (such as libmysql-java) also provide /usr/share/java/mysql-connector-java.jar;
+# installing mysql-connector-java on top of them can cause package conflicts.
+if os.path.exists(mysql_jdbc_driver_jar):
+  hive_exclude_packages = ['mysql-connector-java']
+else:
+  hive_exclude_packages = []
+
+########################################################
+########### WebHCat related params #####################
+########################################################
+
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.hcat_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+# string form of security_enabled, passed to smoke-test shell scripts
+security_param = "true" if security_enabled else "false"
+
+import functools
+# Partial that pre-binds the arguments shared by every HdfsDirectory call;
+# scripts then create HDFS directories via params.HdfsDirectory(...).
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir = hadoop_conf_dir,
+  hdfs_user = hdfs_principal_name if security_enabled else hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
+)
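+# Example (see webhcat.py): queue paths with action="create_delayed", then flush the
+# batch with a final params.HdfsDirectory(None, action="create").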

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
new file mode 100644
index 0000000..945a676
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+
+from hcat_service_check import hcat_service_check
+from webhcat_service_check import webhcat_service_check
+
+class HiveServiceCheck(Script):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+
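+    # format() resolves these placeholders from the params module.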
+    address = format("{hive_server_host}")
+    port = int(format("{hive_server_port}"))
+    print "Testing connectivity to HiveServer2"
+    if params.security_enabled:
+      kinitcmd=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
+    else:
+      kinitcmd=None
+
+    try:
+      check_thrift_port_sasl(address, port, params.hive_server2_authentication,
+                             params.hive_server_principal, kinitcmd, params.smokeuser)
+      print "Successfully connected to %s on port %s" % (address, port)
+    except Exception:
+      print "Connection to %s on port %s failed" % (address, port)
+      sys.exit(1)
+
+    hcat_service_check()
+    webhcat_service_check()
+
+if __name__ == "__main__":
+  HiveServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
new file mode 100644
index 0000000..e6f2514
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+config = Script.get_config()
+
+hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
+hive_pid = 'hive-server.pid'
+
+hive_metastore_pid = 'hive.pid'
+
+hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
+webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
+
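+# SUSE and Ubuntu package the MySQL daemon as "mysql"; RHEL/CentOS use "mysqld".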
+if System.get_instance().os_family in ("suse", "ubuntu"):
+  daemon_name = 'mysql'
+else:
+  daemon_name = 'mysqld'
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
new file mode 100644
index 0000000..c02bf74
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -0,0 +1,143 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os.path
+import glob
+
+from resource_management import *
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+
+
+def webhcat():
+  import params
+
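+  # Directories queued with action="create_delayed" are only materialized when the
+  # HdfsDirectory(None, action="create") call below flushes the whole batch at once.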
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
+    params.HdfsDirectory(params.webhcat_apps_dir,
+                         action="create_delayed",
+                         owner=params.webhcat_user,
+                         mode=0755
+    )
+
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
+  Directory(params.templeton_pid_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.templeton_log_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.config_dir,
+            recursive=True,
+            owner=params.webhcat_user,
+            group=params.user_group)
+
+  if params.security_enabled:
+    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+  else:
+    kinit_if_needed = ""
+
+  if kinit_if_needed:
+    Execute(kinit_if_needed,
+            user=params.webhcat_user,
+            path='/bin'
+    )
+
+  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
+    copy_tarballs_to_hdfs('hive', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('pig', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('hadoop-streaming', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('sqoop', params.webhcat_user, params.hdfs_user, params.user_group)
+  else:
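+    # NOTE: "kinnit_if_needed" is the parameter name as spelled by the CopyFromLocal resource definition.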
+    CopyFromLocal(params.hadoop_streaming_jars,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if os.path.isfile(params.pig_tar_file):
+      CopyFromLocal(params.pig_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
+    CopyFromLocal(params.hive_tar_file,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if glob.glob(params.sqoop_tar_file):
+      CopyFromLocal(params.sqoop_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
+  XmlConfig("webhcat-site.xml",
+            conf_dir=params.config_dir,
+            configurations=params.config['configurations']['webhcat-site'],
+            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
+            owner=params.webhcat_user,
+            group=params.user_group,
+            )
+
+  File(format("{config_dir}/webhcat-env.sh"),
+       owner=params.webhcat_user,
+       group=params.user_group,
+       content=InlineTemplate(params.webhcat_env_sh_template)
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
new file mode 100644
index 0000000..a8b3a8f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -0,0 +1,53 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+
+from webhcat import webhcat
+from webhcat_service import webhcat_service
+
+class WebHCatServer(Script):
+  def install(self, env):
+    self.install_packages(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    webhcat()
+
+  def start(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+    self.configure(env) # regenerate configs before start so security changes take effect
+    webhcat_service(action = 'start')
+
+  def stop(self, env, rolling_restart=False):
+    import params
+    env.set_params(params)
+
+    webhcat_service(action = 'stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    check_process_status(status_params.webhcat_pid_file)
+
+if __name__ == "__main__":
+  WebHCatServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
new file mode 100644
index 0000000..25a60e8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
@@ -0,0 +1,42 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+
+def webhcat_service(action='start'):
+  import params
+
+  cmd = format('env HADOOP_HOME={hadoop_home} {webhcat_bin_dir}/webhcat_server.sh')
+
+  if action == 'start':
+    daemon_cmd = format('{cmd} start')
+    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
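+    # Skip the start if a live process already owns the recorded pid.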
+    Execute(daemon_cmd,
+            user=params.webhcat_user,
+            not_if=no_op_test
+    )
+  elif action == 'stop':
+    daemon_cmd = format('{cmd} stop')
+    Execute(daemon_cmd,
+            user=params.webhcat_user
+    )
+    File(params.webhcat_pid_file,
+         action="delete",
+    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/46e9bcb8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
new file mode 100644
index 0000000..8d15e47
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+def webhcat_service_check():
+  import params
+  File(format("{tmp_dir}/templetonSmoke.sh"),
+       content= StaticFile('templetonSmoke.sh'),
+       mode=0755
+  )
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+               " {security_param} {kinit_path_local}",
+               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
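+  # "no_keytab" keeps templetonSmoke.sh's argument count stable when security is disabled.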
+
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+          logoutput=True)