Posted to commits@ambari.apache.org by jl...@apache.org on 2015/08/17 07:13:35 UTC

[10/23] ambari git commit: AMBARI-12779: [PluggableStackDefinition] Remove ambari-server/src/main/resources/stacks/PHD (jluniya)

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/pigSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/pigSmoke.sh b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/pigSmoke.sh
deleted file mode 100644
index 2e90ac0..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/pigSmoke.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-A = load 'passwd' using PigStorage(':');
-B = foreach A generate \$0 as id;
-store B into 'pigsmoke.out';
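
For reference, the Pig job above splits each line of 'passwd' on ':' and keeps only the first field; a plain-Python sketch of the same projection (assumes a local 'passwd' file, illustration only):

    with open("passwd") as src, open("pigsmoke.out", "w") as out:
        for line in src:
            out.write(line.split(":")[0] + "\n")  # $0 = first ':'-delimited field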

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/startMetastore.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/startMetastore.sh b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/startMetastore.sh
deleted file mode 100644
index da0f60b..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/startMetastore.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env bash
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# Usage: startMetastore.sh <outfile> <errfile> <pidfile> <hive_conf_dir> <hive_log_dir>
-HIVE_CONF_DIR=$4 hive --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
-echo $! > $3

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/templetonSmoke.sh
deleted file mode 100644
index e26148b..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/files/templetonSmoke.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env bash
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export ttonhost=$1
-export smoke_test_user=$2
-export smoke_user_keytab=$3
-export security_enabled=$4
-export kinit_path_local=$5
-export ttonurl="http://${ttonhost}:50111/templeton/v1"
-
-if [[ $security_enabled == "true" ]]; then
-  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
-else
-  kinitcmd=""
-fi
-
-export no_proxy=$ttonhost
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0  # NOTE: this early exit skips the ddl and pig checks that follow
-
-#try hcat ddl command
-echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit  1
-fi
-
-# TODO: confirm whether the Pig smoke test can be run in secure mode; it is skipped for now
-if [[ $security_enabled == "true" ]]; then
-  echo "Templeton Pig Smoke Tests not run in secure mode"
-  exit 0
-fi
-
-#try pig query
-outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
-ttonTestOutput="/tmp/idtest.${outname}.out";
-ttonTestInput="/tmp/idtest.${outname}.in";
-ttonTestScript="idtest.${outname}.pig"
-
-echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
-echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
-echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
-
-#copy pig script to hdfs
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
-
-#copy input file to hdfs
-su -s /bin/bash - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
-
-#create, copy post args file
-echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
-
-#submit pig query
-cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
-retVal=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0
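
The curl/sed pipeline used three times above appends an 'http_code <NNN>' marker to the response and then extracts NNN; a rough Python equivalent of that parsing step (a sketch, not part of the stack scripts):

    import re

    def parse_http_code(curl_output):
        # curl -w 'http_code <%{http_code}>' appends the status-code marker
        match = re.search(r"http_code <(\d+)>", curl_output)
        return int(match.group(1)) if match else None

    assert parse_http_code("noise http_code <200>") == 200  # 200 = check passes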

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/__init__.py
deleted file mode 100644
index 5561e10..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat.py
deleted file mode 100644
index 31c1673..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-
-
-def hcat():
-  import params
-
-  Directory(params.hive_conf_dir,
-            recursive=True,
-            owner=params.hcat_user,
-            group=params.user_group,
-  )
-
-
-  Directory(params.hcat_conf_dir,
-            recursive=True,
-            owner=params.hcat_user,
-            group=params.user_group,
-  )
-
-  Directory(params.hcat_pid_dir,
-            owner=params.webhcat_user,
-            recursive=True
-  )
-
-  XmlConfig("hive-site.xml",
-            conf_dir=params.hive_client_conf_dir,
-            configurations=params.config['configurations']['hive-site'],
-            configuration_attributes=params.config['configuration_attributes']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0644)
-
-  File(format("{hcat_conf_dir}/hcat-env.sh"),
-       owner=params.hcat_user,
-       group=params.user_group,
-       content=InlineTemplate(params.hcat_env_sh_template)
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_client.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_client.py
deleted file mode 100644
index 8b5921a..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_client.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from hcat import hcat
-
-class HCatClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-
-    env.set_params(params)
-
-    hcat()
-
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-
-if __name__ == "__main__":
-  HCatClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_service_check.py
deleted file mode 100644
index 081352a..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hcat_service_check.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.libraries.functions import get_unique_id_and_date
-
-def hcat_service_check():
-    import params
-    unique = get_unique_id_and_date()
-    output_file = format("/apps/hive/warehouse/hcatsmoke{unique}")
-    test_cmd = format("fs -test -e {output_file}")
-
-    if params.security_enabled:
-      kinit_cmd = format(
-        "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
-    else:
-      kinit_cmd = ""
-
-    File(format("{tmp_dir}/hcatSmoke.sh"),
-         content=StaticFile("hcatSmoke.sh"),
-         mode=0755
-    )
-
-    prepare_cmd = format("{kinit_cmd}env JAVA_HOME={java64_home} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} prepare")
-
-    Execute(prepare_cmd,
-            tries=3,
-            user=params.smokeuser,
-            try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
-            logoutput=True)
-
-    if params.security_enabled:
-      ExecuteHadoop(test_cmd,
-                    user=params.hdfs_user,
-                    logoutput=True,
-                    conf_dir=params.hadoop_conf_dir,
-                    security_enabled=params.security_enabled,
-                    kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab,
-                    principal=params.hdfs_principal_name,
-                    bin_dir=params.execute_path
-      )
-    else:
-      ExecuteHadoop(test_cmd,
-                    user=params.hdfs_user,
-                    logoutput=True,
-                    conf_dir=params.hadoop_conf_dir,
-                    security_enabled=params.security_enabled,
-                    kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab,
-                    bin_dir=params.execute_path
-      )
-
-    cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
-
-    Execute(cleanup_cmd,
-            tries=3,
-            user=params.smokeuser,
-            try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
-            logoutput=True
-    )
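
The Execute calls above rely on tries/try_sleep for retries; a minimal sketch of that retry behavior in plain Python (illustrative only, outside resource_management):

    import time

    def run_with_retries(fn, tries=3, try_sleep=5):
        for attempt in range(1, tries + 1):
            try:
                return fn()
            except Exception:
                if attempt == tries:
                    raise  # out of retries, surface the last failure
                time.sleep(try_sleep)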

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive.py
deleted file mode 100644
index e388ee5..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive.py
+++ /dev/null
@@ -1,216 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-import os
-
-
-def hive(name=None):
-  import params
-
-  if name == 'hiveserver2':
-
-    params.HdfsDirectory(params.hive_apps_whs_dir,
-                         action="create_delayed",
-                         owner=params.hive_user,
-                         mode=0777
-    )
-    params.HdfsDirectory(params.hive_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hive_user,
-                         mode=params.hive_hdfs_user_mode
-    )
-    params.HdfsDirectory(None, action="create")
-  
-  # We should change configurations for the client as well as for the server,
-  # because stale-configs are tracked at the service level, not per component.
-  for conf_dir in params.hive_conf_dirs_list:
-    fill_conf_dir(conf_dir)
-
-  XmlConfig("hive-site.xml",
-            conf_dir=params.hive_config_dir,
-            configurations=params.config['configurations']['hive-site'],
-            configuration_attributes=params.config['configuration_attributes']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0644)
-
-  File(format("{hive_config_dir}/hive-env.sh"),
-       owner=params.hive_user,
-       group=params.user_group,
-       content=InlineTemplate(params.hive_env_sh_template)
-  )
-
-  if name == 'metastore' or name == 'hiveserver2':
-    jdbc_connector()
-    
-  environment = {
-    "no_proxy": format("{ambari_server_hostname}")
-  }
-
-  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" "
-               "--retry 5 "
-               "{jdk_location}{check_db_connection_jar_name} "
-               "-o {check_db_connection_jar_name}'")
-
-  Execute(cmd,
-          not_if=format("[ -f {check_db_connection_jar} ]"),
-          environment = environment)
-
-  if name == 'metastore':
-    File(params.start_metastore_path,
-         mode=0755,
-         content=StaticFile('startMetastore.sh')
-    )
-    if params.init_metastore_schema:
-      create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                 "{hive_bin}/schematool -initSchema "
-                                 "-dbType {hive_metastore_db_type} "
-                                 "-userName {hive_metastore_user_name} "
-                                 "-passWord {hive_metastore_user_passwd!p}")
-
-      check_schema_created_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                        "{hive_bin}/schematool -info "
-                                        "-dbType {hive_metastore_db_type} "
-                                        "-userName {hive_metastore_user_name} "
-                                        "-passWord {hive_metastore_user_passwd!p}")
-
-      Execute(create_schema_cmd,
-              not_if = check_schema_created_cmd
-      )
-  elif name == 'hiveserver2':
-    File(params.start_hiveserver2_path,
-         mode=0755,
-         content=Template(format('{start_hiveserver2_script}'))
-    )
-
-  if name != "client":
-    crt_directory(params.hive_pid_dir)
-    crt_directory(params.hive_log_dir)
-    crt_directory(params.hive_var_lib)
-
-def fill_conf_dir(component_conf_dir):
-  import params
-  
-  Directory(component_conf_dir,
-            owner=params.hive_user,
-            group=params.user_group,
-            recursive=True
-  )
-
-  XmlConfig("mapred-site.xml",
-            conf_dir=component_conf_dir,
-            configurations=params.config['configurations']['mapred-site'],
-            configuration_attributes=params.config['configuration_attributes']['mapred-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0644)
-
-
-  crt_file(format("{component_conf_dir}/hive-default.xml.template"))
-  crt_file(format("{component_conf_dir}/hive-env.sh.template"))
-
-  log4j_exec_filename = 'hive-exec-log4j.properties'
-  if (params.log4j_exec_props != None):
-    File(format("{component_conf_dir}/{log4j_exec_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=params.log4j_exec_props
-    )
-  elif os.path.exists(format("{component_conf_dir}/{log4j_exec_filename}.template")):
-    File(format("{component_conf_dir}/{log4j_exec_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=StaticFile(format("{component_conf_dir}/{log4j_exec_filename}.template"))
-    )
-
-  log4j_filename = 'hive-log4j.properties'
-  if (params.log4j_props != None):
-    File(format("{component_conf_dir}/{log4j_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=params.log4j_props
-    )
-  elif os.path.exists(format("{component_conf_dir}/{log4j_filename}.template")):
-    File(format("{component_conf_dir}/{log4j_filename}"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hive_user,
-         content=StaticFile(format("{component_conf_dir}/{log4j_filename}.template"))
-    )
-
-
-def crt_directory(name):
-  import params
-
-  Directory(name,
-            recursive=True,
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0755)
-
-
-def crt_file(name):
-  import params
-
-  File(name,
-       owner=params.hive_user,
-       group=params.user_group
-  )
-
-
-def jdbc_connector():
-  import params
-
-  if params.hive_jdbc_driver == "com.mysql.jdbc.Driver":
-    cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
-
-    Execute(cmd,
-            not_if=format("test -f {target}"),
-            creates=params.target,
-            environment= {'PATH' : params.execute_path },
-            path=["/bin", "/usr/bin/"])
-  elif params.hive_jdbc_driver == "org.postgresql.Driver":
-    cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
-
-    Execute(cmd,
-            not_if=format("test -f {target}"),
-            creates=params.target,
-            environment= {'PATH' : params.execute_path },
-            path=["/bin", "usr/bin/"])
-
-  elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
-    environment = {
-      "no_proxy": format("{ambari_server_hostname}")
-    }
-
-    cmd = format(
-      "mkdir -p {artifact_dir} ; "
-      "curl -kf -x \"\" --retry 10 {driver_curl_source} -o {driver_curl_target} &&  "
-      "cp {driver_curl_target} {target}")
-
-    Execute(cmd,
-            not_if=format("test -f {target}"),
-            path=["/bin", "/usr/bin/"],
-            environment=environment)
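
The Execute/not_if pairs above implement an idempotent download-once guard; the same pattern in plain Python, as a sketch (subprocess and curl stand in for resource_management's Execute):

    import os
    import subprocess

    def fetch_once(url, dest):
        if os.path.isfile(dest):  # plays the role of not_if="test -f <dest>"
            return
        subprocess.check_call(["curl", "-kf", "--retry", "5", url, "-o", dest])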

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_client.py
deleted file mode 100644
index 499f632..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_client.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import sys
-from resource_management import *
-
-from hive import hive
-
-class HiveClient(Script):
-  def install(self, env):
-    import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='client')
-
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_metastore.py
deleted file mode 100644
index 6ee5507..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_metastore.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from hive import hive
-from hive_service import hive_service
-from mysql_service import mysql_service
-
-class HiveMetastore(Script):
-
-  def install(self, env):
-    import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='metastore')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    hive_service( 'metastore',
-                   action = 'start'
-    )
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    hive_service( 'metastore',
-                   action = 'stop'
-    )
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
-    # Verify the process recorded in the metastore PID file is running
-    check_process_status(pid_file)
-
-if __name__ == "__main__":
-  HiveMetastore().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_server.py
deleted file mode 100644
index 34f2d96..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_server.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hive import hive
-from hive_service import hive_service
-from install_jars import install_tez_jars
-
-class HiveServer(Script):
-
-  def install(self, env):
-    import params
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='hiveserver2')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    
-    install_tez_jars() # Put tez jars in hdfs
-
-    hive_service( 'hiveserver2',
-                  action = 'start'
-    )
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    hive_service( 'hiveserver2',
-                  action = 'stop'
-    )
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
-    # Verify the process recorded in the HiveServer2 PID file is running
-    check_process_status(pid_file)
-
-
-if __name__ == "__main__":
-  HiveServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_service.py
deleted file mode 100644
index 5463df4..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/hive_service.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-import time
-from resource_management.core.shell import call
-
-def hive_service(
-    name,
-    action='start'):
-
-  import params
-
-  if name == 'metastore':
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
-    cmd = format(
-      "env HADOOP_HOME={hadoop_home} JAVA_HOME={java64_home} {start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
-  elif name == 'hiveserver2':
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
-    cmd = format(
-      "env JAVA_HOME={java64_home} {start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
-
-  process_id_exists = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
-  
-  if action == 'start':
-    if name == 'hiveserver2':
-      check_fs_root()
-
-    daemon_cmd = format("{cmd}")
-
-    Execute(daemon_cmd,
-            user=params.hive_user,
-            environment={'HADOOP_HOME': params.hadoop_home},
-            path=params.execute_path,
-            not_if=process_id_exists
-    )
-
-    if params.hive_jdbc_driver == "com.mysql.jdbc.Driver" or \
-       params.hive_jdbc_driver == "org.postgresql.Driver" or \
-       params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
-      
-      db_connection_check_command = format(
-        "{java64_home}/bin/java -cp {check_db_connection_jar}:/usr/share/java/{jdbc_jar_name} org.apache.ambari.server.DBConnectionVerification '{hive_jdbc_connection_url}' {hive_metastore_user_name} {hive_metastore_user_passwd!p} {hive_jdbc_driver}")
-      
-      Execute(db_connection_check_command,
-              path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin', tries=5, try_sleep=10)
-      
-    # AMBARI-5800 - wait for the server to come up instead of just checking for PID existence
-    if name == 'hiveserver2':
-      SOCKET_WAIT_SECONDS = 120
-      address=params.hive_server_host
-      port=int(params.hive_server_port)
-      
-      start_time = time.time()
-      end_time = start_time + SOCKET_WAIT_SECONDS
-
-      is_service_socket_valid = False
-      print "Waiting for the Hive server to start..."
-      while time.time() < end_time:
-        if check_thrift_port_sasl(address, port, 2, security_enabled=params.security_enabled):
-          is_service_socket_valid = True
-          break
-        else:
-          time.sleep(2)
-
-      elapsed_time = time.time() - start_time    
-      
-      if not is_service_socket_valid:
-        raise Fail("Connection to Hive server %s on port %s failed after %d seconds" % (address, port, elapsed_time))
-      
-      print "Successfully connected to Hive at %s on port %s after %d seconds" % (address, port, elapsed_time)    
-            
-  elif action == 'stop':
-    demon_cmd = format("kill `cat {pid_file}` >/dev/null 2>&1 && rm -f {pid_file}")
-    Execute(demon_cmd,
-            not_if = format("! ({process_id_exists})")
-    )
-
-def check_fs_root():
-  import params  
-  fs_root_url = format("{fs_root}{hive_apps_whs_dir}")
-  cmd = format("metatool -listFSRoot 2>/dev/null | grep hdfs://")
-  code, out = call(cmd, user=params.hive_user)
-  if code == 0 and fs_root_url.strip() != out.strip():
-    cmd = format("metatool -updateLocation {fs_root}{hive_apps_whs_dir} {out}")
-    Execute(cmd,
-            environment= {'PATH' : params.execute_path },
-            user=params.hive_user)
\ No newline at end of file
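
The AMBARI-5800 loop above waits for HiveServer2 to actually accept connections rather than trusting the PID file; a simplified plain-socket sketch of that wait (the real code checks the Thrift port via check_thrift_port_sasl):

    import socket
    import time

    def wait_for_port(host, port, timeout=120, interval=2):
        end_time = time.time() + timeout
        while time.time() < end_time:
            try:
                socket.create_connection((host, port), timeout=interval).close()
                return True  # server is accepting connections
            except socket.error:
                time.sleep(interval)
        return False  # caller should raise Fail(...), as above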

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/install_jars.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/install_jars.py
deleted file mode 100644
index 08a0a50..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/install_jars.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import os
-import fnmatch
-
-def install_tez_jars():
-  import params
-  
-  destination_hdfs_dirs = get_tez_hdfs_dir_paths(params.tez_lib_uris)
-
-  # If tez libraries are to be stored in hdfs
-  if destination_hdfs_dirs:
-    for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsDirectory(hdfs_dir,
-                          action="create_delayed",
-                          owner=params.tez_user,
-                          mode=0755
-      )
-    pass
-    params.HdfsDirectory(None, action="create")
-
-    if params.security_enabled:
-      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
-    else:
-      kinit_if_needed = ""
-
-    if kinit_if_needed:
-      Execute(kinit_if_needed,
-              user=params.tez_user,
-              path='/bin'
-      )
-    pass
-
-    app_dir_path = None
-    lib_dir_path = None
-
-    if len(destination_hdfs_dirs) > 0:
-      for path in destination_hdfs_dirs:
-        if 'lib' in path:
-          lib_dir_path = path
-        else:
-          app_dir_path = path
-        pass
-      pass
-    pass
-
-    if app_dir_path:
-      for scr_file, dest_file in params.app_dir_files.iteritems():
-        CopyFromLocal(scr_file,
-                      mode=0755,
-                      owner=params.tez_user,
-                      dest_dir=app_dir_path,
-                      dest_file=dest_file,
-                      kinnit_if_needed=kinit_if_needed,
-                      hdfs_user=params.hdfs_user,
-                      hadoop_bin_dir=params.hadoop_bin_dir,
-                      hadoop_conf_dir=params.hadoop_conf_dir
-        )
-
-    if lib_dir_path:
-      CopyFromLocal(params.tez_local_lib_jars,
-                    mode=0755,
-                    owner=params.tez_user,
-                    dest_dir=lib_dir_path,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-    pass
-
-
-def get_tez_hdfs_dir_paths(tez_lib_uris = None):
-  hdfs_path_prefix = 'hdfs://'
-  lib_dir_paths = []
-  if tez_lib_uris and tez_lib_uris.strip().find(hdfs_path_prefix, 0) != -1:
-    dir_paths = tez_lib_uris.split(',')
-    for path in dir_paths:
-      if not "tez.tar.gz" in path:
-        lib_dir_path = path.replace(hdfs_path_prefix, '')
-        lib_dir_path = lib_dir_path if lib_dir_path.endswith(os.sep) else lib_dir_path + os.sep
-        lib_dir_paths.append(lib_dir_path)
-      else:
-        lib_dir_path = path.replace(hdfs_path_prefix, '')
-        lib_dir_paths.append(os.path.dirname(lib_dir_path))
-    pass
-  pass
-
-  return lib_dir_paths
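
To make the path handling in get_tez_hdfs_dir_paths concrete, here is what it returns for a hypothetical tez.lib.uris value (hostname invented for illustration; assumes os.sep is '/'):

    uris = "hdfs://nn:8020/apps/tez/,hdfs://nn:8020/apps/tez/lib/tez.tar.gz"
    print(get_tez_hdfs_dir_paths(uris))
    # ['nn:8020/apps/tez/', 'nn:8020/apps/tez/lib']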

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_server.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_server.py
deleted file mode 100644
index 6df6059..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_server.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from mysql_service import mysql_service
-
-class MysqlServer(Script):
-
-  def install(self, env):
-    import params
-    
-    self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    mysql_service(daemon_name=params.daemon_name, action='start')
-
-    File(params.mysql_adduser_path,
-         mode=0755,
-         content=StaticFile('addMysqlUser.sh')
-    )
-
-    cmd = format("bash -x {mysql_adduser_path} {daemon_name} {hive_metastore_user_name} {hive_metastore_user_passwd!p} {mysql_host[0]}")
-
-    Execute(cmd,
-            tries=3,
-            try_sleep=5,
-            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
-    )
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-
-    mysql_service(daemon_name=params.daemon_name, action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    mysql_service(daemon_name=params.daemon_name, action = 'stop')
-
-  def status(self, env):
-    import status_params
-    mysql_service(daemon_name=status_params.daemon_name, action = 'status')
-
-if __name__ == "__main__":
-  MysqlServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_service.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_service.py
deleted file mode 100644
index 11bbdd8..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/mysql_service.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-
-def mysql_service(daemon_name=None, action='start'):
-  status_cmd = format('service {daemon_name} status | grep running')
-  cmd = format('service {daemon_name} {action}')
-
-  if action == 'status':
-    Execute(status_cmd)
-  elif action == 'stop':
-    Execute(cmd,
-            logoutput = True,
-            only_if = status_cmd
-    )
-  elif action == 'start':
-    # required for running hive
-    replace_bind_address = format("sed -i 's|^bind-address[ \t]*=.*|bind-address = 0.0.0.0|' {mysql_configname}")  
-    Execute(replace_bind_address)
-    
-    Execute(cmd,
-      logoutput = True,
-      not_if = status_cmd
-    )
-
-
-
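
The status_cmd above doubles as a guard: start runs only when the grep finds nothing (not_if) and stop only when the daemon is found running (only_if). The same gating in plain Python, as a sketch:

    import subprocess

    def is_running(daemon_name):
        # service <name> status | grep running
        return subprocess.call(
            "service %s status | grep running" % daemon_name, shell=True) == 0

    def start(daemon_name):
        if not is_running(daemon_name):  # Execute(cmd, not_if=status_cmd)
            subprocess.check_call(["service", daemon_name, "start"])

    def stop(daemon_name):
        if is_running(daemon_name):      # Execute(cmd, only_if=status_cmd)
            subprocess.check_call(["service", daemon_name, "stop"])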

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py
deleted file mode 100644
index 835e018..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import status_params
-import os
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
-
-phd_stack_version = config['hostLevelParams']['stack_version']
-
-#hadoop params
-if rpm_version:
-  hadoop_bin_dir = "/usr/phd/current/hadoop-client/bin"
-  hadoop_home = '/usr/phd/current/hadoop-client'
-  hadoop_streeming_jars = "/usr/phd/current/hadoop-mapreduce-client/hadoop-streaming-*.jar"
-  hive_bin = '/usr/phd/current/hive-client/bin'
-  hive_lib = '/usr/phd/current/hive-client/lib'
-  tez_local_api_jars = '/usr/phd/current/tez-client/tez*.jar'
-  tez_local_lib_jars = '/usr/phd/current/tez-client/lib/*.jar'
-  tez_tar_file = "/usr/phd/current/tez-client/lib/tez*.tar.gz"
-  pig_tar_file = '/usr/phd/current/pig-client/pig.tar.gz'
-  hive_tar_file = '/usr/phd/current/hive-client/hive.tar.gz'
-  sqoop_tar_file = '/usr/phd/current/sqoop-client/sqoop*.tar.gz'
-
-  hcat_lib = '/usr/phd/current/hive/hive-hcatalog/share/hcatalog'
-  webhcat_bin_dir = '/usr/phd/current/hive-hcatalog/sbin'
-
-else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_home = '/usr'
-  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-  hive_bin = '/usr/lib/hive/bin'
-  hive_lib = '/usr/lib/hive/lib/'
-  tez_local_api_jars = '/usr/lib/tez/tez*.jar'
-  tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
-  tez_tar_file = "/usr/lib/tez/tez*.tar.gz"
-  pig_tar_file = '/usr/share/PHD-webhcat/pig.tar.gz'
-  hive_tar_file = '/usr/share/PHD-webhcat/hive.tar.gz'
-  sqoop_tar_file = '/usr/share/PHD-webhcat/sqoop*.tar.gz'
-
-  if str(phd_stack_version).startswith('2.0'):
-    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
-    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-  # for newer versions
-  else:
-    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
-    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-hive_conf_dir = "/etc/hive/conf"
-hive_client_conf_dir = "/etc/hive/conf"
-hive_server_conf_dir = '/etc/hive/conf.server'
-
-# for newer versions
-hcat_conf_dir = '/etc/hive-hcatalog/conf'
-config_dir = '/etc/hive-webhcat/conf'
-
-execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
-hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
-hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
-
-hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
-hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
-
-#users
-hive_user = config['configurations']['hive-env']['hive_user']
-#JDBC driver jar name
-hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
-if hive_jdbc_driver == "com.mysql.jdbc.Driver":
-  jdbc_jar_name = "mysql-connector-java.jar"
-  jdbc_symlink_name = "mysql-jdbc-driver.jar"
-elif hive_jdbc_driver == "org.postgresql.Driver":
-  jdbc_jar_name = "postgresql-jdbc.jar"
-  jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
-  jdbc_jar_name = "ojdbc6.jar"
-  jdbc_symlink_name = "oracle-jdbc-driver.jar"
-
-check_db_connection_jar_name = "DBConnectionVerification.jar"
-check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-
-#common
-hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
-hive_var_lib = '/var/lib/hive'
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
-hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
-hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
-
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
-smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-
-#hive_env
-hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
-hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
-hive_pid_dir = status_params.hive_pid_dir
-hive_pid = status_params.hive_pid
-#Default conf dir for client
-hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir]
-
-if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
-  hive_config_dir = hive_server_conf_dir
-else:
-  hive_config_dir = hive_client_conf_dir
-
-#hive-site
-hive_database_name = config['configurations']['hive-env']['hive_database_name']
-
-#Starting hiveserver2
-start_hiveserver2_script = 'startHiveserver2.sh.j2'
-
-##Starting metastore
-start_metastore_script = 'startMetastore.sh'
-hive_metastore_pid = status_params.hive_metastore_pid
-java_share_dir = '/usr/share/java'
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
-
-hdfs_user =  config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
-artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
-
-target = format("{hive_lib}/{jdbc_jar_name}")
-
-jdk_location = config['hostLevelParams']['jdk_location']
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-
-start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
-start_metastore_path = format("{tmp_dir}/start_metastore_script")
-
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
-java64_home = config['hostLevelParams']['java_home']
-
-##### MYSQL
-
-db_name = config['configurations']['hive-env']['hive_database_name']
-mysql_user = "mysql"
-mysql_group = 'mysql'
-mysql_host = config['clusterHostInfo']['hive_mysql_host']
-
-mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
-
-##### POSTGRES
-postgresql_adduser_file = "addPostgreSQLUser.sh"
-postgresql_adduser_path = format("{tmp_dir}/{postgresql_adduser_file}")
-postgresql_host = config['clusterHostInfo']['hive_postgresql_host']
-postgresql_pghba_conf_path = "/var/lib/pgsql/data/pg_hba.conf"
-postgresql_conf_path = "/var/lib/pgsql/data/postgresql.conf"
-postgresql_daemon_name = status_params.postgresql_daemon_name
-
-######## Metastore Schema
-init_metastore_schema = True
-
-########## HCAT
-
-hcat_dbroot = hcat_lib
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-
-hcat_pid_dir = status_params.hcat_pid_dir
-hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-hcat_env_sh_template = config['configurations']['hcat-env']['content']
-
-#hive-log4j.properties.template
-if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
-  log4j_props = config['configurations']['hive-log4j']['content']
-else:
-  log4j_props = None
-
-#hive-exec-log4j.properties.template
-if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
-  log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
-else:
-  log4j_exec_props = None
-
-daemon_name = status_params.daemon_name
-hive_env_sh_template = config['configurations']['hive-env']['content']
-
-hive_hdfs_user_dir = format("/user/{hive_user}")
-hive_hdfs_user_mode = 0700
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
-#for create_hdfs_directory
-hostname = config["hostname"]
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-
-# Tez libraries
-tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
-tez_user = config['configurations']['tez-env']['tez_user']
-
-if System.get_instance().os_family == "ubuntu":
-  mysql_configname = '/etc/mysql/my.cnf'
-else:
-  mysql_configname = '/etc/my.cnf'
-
-# Hive security
-hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
-
-mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
-
-# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
-# trying to install mysql-connector-java upon them can cause packages to conflict.
-if os.path.exists(mysql_jdbc_driver_jar):
-  hive_exclude_packages = ['mysql-connector-java']
-else:  
-  hive_exclude_packages = []
-
-########################################################
-########### WebHCat related params #####################
-########################################################
-
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.hcat_pid_dir
-
-webhcat_pid_file = status_params.webhcat_pid_file
-
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-#for create_hdfs_directory
-security_param = "true" if security_enabled else "false"
-
-if str(phd_stack_version).startswith('2.0') or str(phd_stack_version).startswith('2.1'):
-  app_dir_files = {tez_local_api_jars:None}
-else:
-  app_dir_files = {
-              tez_local_api_jars:None,
-              tez_tar_file:"tez.tar.gz"
-  }
-
-import functools
-# Create a partial function that pre-binds the arguments common to every
-# HdfsDirectory call; code then creates HDFS directories via params.HdfsDirectory.
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
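
The params module above ends by pre-binding the arguments shared by every
HdfsDirectory call with functools.partial. A minimal sketch of the same
pattern, with a stand-in function and illustrative values rather than the
real resource_management resource:

    import functools

    def hdfs_directory(path, conf_dir=None, hdfs_user=None, action="create"):
        # Stand-in for the real HdfsDirectory resource; just echo the call.
        print("HdfsDirectory(%s, conf_dir=%s, hdfs_user=%s, action=%s)"
              % (path, conf_dir, hdfs_user, action))

    # Pre-bind the arguments that are identical at every call site.
    HdfsDirectory = functools.partial(hdfs_directory,
                                      conf_dir="/etc/hadoop/conf",
                                      hdfs_user="hdfs")

    # Call sites then pass only what varies.
    HdfsDirectory("/apps/webhcat", action="create_delayed")
    HdfsDirectory("/user/hcat", action="create_delayed")
    HdfsDirectory(None, action="create")  # flush the delayed creates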

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_server.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_server.py
deleted file mode 100644
index 6732573..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_server.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from postgresql_service import postgresql_service
-
-class PostgreSQLServer(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    # init the database; the '|| :' makes the command always return 0 in case the
-    # database was already initialized, e.g. when the postgresql server is colocated
-    # with the ambari server
-    Execute(format("service {postgresql_daemon_name} initdb || :"))
-
-    # update the configuration files
-    self.update_pg_hba_conf(env)
-    self.update_postgresql_conf(env)
-
-    # Reload the settings and start the postgresql server for the changes to take effect
-    # Note: don't restart the postgresql server, because when the Ambari server and the
-    # hive metastore are on the same machine they share one postgresql instance, and a
-    # restart may cause the Ambari server to lose its database connection
-    postgresql_service(postgresql_daemon_name=params.postgresql_daemon_name, action = 'reload')
-
-    # ensure the postgresql server is started, because adding the hive metastore user requires a running server.
-    self.start(env)
-
-    # create the database and hive_metastore_user
-    File(params.postgresql_adduser_path,
-         mode=0755,
-         content=StaticFile(format("{postgresql_adduser_file}"))
-    )
-
-    cmd = format("bash -x {postgresql_adduser_path} {postgresql_daemon_name} {hive_metastore_user_name} {hive_metastore_user_passwd!p} {db_name}")
-
-    Execute(cmd,
-            tries=3,
-            try_sleep=5,
-            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
-    )
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-
-    postgresql_service(postgresql_daemon_name=params.postgresql_daemon_name, action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    postgresql_service(postgresql_daemon_name=params.postgresql_daemon_name, action = 'stop')
-
-  def status(self, env):
-    import status_params
-    postgresql_service(postgresql_daemon_name=status_params.postgresql_daemon_name, action = 'status')
-
-  def update_postgresql_conf(self, env):
-    import params
-    env.set_params(params)
-
-    # change listen_addresses to *
-    Execute(format("sed -i '/^[[:space:]]*listen_addresses[[:space:]]*=.*/d' {postgresql_conf_path}"))
-    Execute(format("echo \"listen_addresses = '*'\" | tee -a {postgresql_conf_path}"))
-
-    # change standard_conforming_strings to off
-    Execute(format("sed -i '/^[[:space:]]*standard_conforming_strings[[:space:]]*=.*/d' {postgresql_conf_path}"))
-    Execute(format("echo \"standard_conforming_strings = off\" | tee -a {postgresql_conf_path}"))
-
-  def update_pg_hba_conf(self, env):
-    import params
-    env.set_params(params)
-
-    # trust hive_metastore_user and postgres locally
-    Execute(format("sed -i '/^[[:space:]]*local[[:space:]]*all[[:space:]]*all.*$/s/^/#/' {postgresql_pghba_conf_path}"))
-    Execute(format("sed -i '/^[[:space:]]*local[[:space:]]*all[[:space:]]*postgres.*$/d' {postgresql_pghba_conf_path}"))
-    Execute(format("sed -i '/^[[:space:]]*local[[:space:]]*all[[:space:]]*\"{hive_metastore_user_name}\".*$/d' {postgresql_pghba_conf_path}"))
-    Execute(format("echo \"local   all   postgres   trust\" | tee -a {postgresql_pghba_conf_path}"))
-    Execute(format("echo \"local   all   \\\"{hive_metastore_user_name}\\\" trust\" | tee -a {postgresql_pghba_conf_path}"))
-
-    # trust hive_metastore_user and postgres for TCP/IP ("host") connections from any IPv4 address
-    Execute(format("sed -i '/^[[:space:]]*host[[:space:]]*all[[:space:]]*all.*$/s/^/#/' {postgresql_pghba_conf_path}"))
-    Execute(format("sed -i '/^[[:space:]]*host[[:space:]]*all[[:space:]]*postgres.*$/d' {postgresql_pghba_conf_path}"))
-    Execute(format("sed -i '/^[[:space:]]*host[[:space:]]*all[[:space:]]*\"{hive_metastore_user_name}\".*$/d' {postgresql_pghba_conf_path}"))
-    Execute(format("echo \"host    all   postgres         0.0.0.0/0       trust\" | tee -a {postgresql_pghba_conf_path}"))
-    Execute(format("echo \"host    all   \\\"{hive_metastore_user_name}\\\"         0.0.0.0/0       trust\" | tee -a {postgresql_pghba_conf_path}"))
-
-if __name__ == "__main__":
-  PostgreSQLServer().execute()
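
Both update_postgresql_conf and update_pg_hba_conf rely on the same
delete-then-append idiom: a sed that strips any existing line for the
setting, then an echo | tee -a that appends the desired one, so repeated
configure() runs converge on a single entry. A rough Python equivalent of
that idempotent edit (the path and setting here are examples):

    import re

    def set_config_line(path, pattern, line):
        # Drop every line matching the setting, then append the desired
        # value, so the edit is safe to repeat.
        with open(path) as f:
            kept = [l for l in f if not re.match(pattern, l)]
        kept.append(line + "\n")
        with open(path, "w") as f:
            f.writelines(kept)

    set_config_line("/var/lib/pgsql/data/postgresql.conf",
                    r"^\s*listen_addresses\s*=",
                    "listen_addresses = '*'")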

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_service.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_service.py
deleted file mode 100644
index cc7b4cc..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/postgresql_service.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-
-def postgresql_service(postgresql_daemon_name=None, action='start'):
-  status_cmd = format('service {postgresql_daemon_name} status | grep running')
-  cmd = format('service {postgresql_daemon_name} {action}')
-
-  if action == 'status':
-    Execute(status_cmd)
-  elif action == 'stop':
-    Execute(cmd,
-            logoutput = True,
-            only_if = status_cmd
-    )
-  elif action == 'start':
-    Execute(cmd,
-      logoutput = True,
-      not_if = status_cmd
-    )
-  else:
-    Execute(cmd, logoutput = True)
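
The not_if/only_if guards above are what make start and stop idempotent:
start is skipped when the status grep already reports a running server, and
stop only runs when it does. The same guard pattern in plain Python, using
subprocess in place of the Execute resource (the daemon name is an example):

    import subprocess

    def service(daemon, action):
        status_cmd = "service %s status | grep running" % daemon
        running = subprocess.call(status_cmd, shell=True) == 0

        if action == "start" and running:
            return  # not_if: already running, nothing to do
        if action == "stop" and not running:
            return  # only_if: not running, nothing to stop
        subprocess.check_call("service %s %s" % (daemon, action), shell=True)

    service("postgresql", "start")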

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/service_check.py
deleted file mode 100644
index b75578b..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/service_check.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import socket
-import sys
-
-from hcat_service_check import hcat_service_check
-from webhcat_service_check import webhcat_service_check
-
-class HiveServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    address=format("{hive_server_host}")
-    port=int(format("{hive_server_port}"))
-    print "Test connectivity to hive server"
-    if check_thrift_port_sasl(address, port, security_enabled=params.security_enabled):
-      print "Successfully connected to %s on port %s" % (address, port)
-    else:
-      print "Connection to %s on port %s failed" % (address, port)
-      exit(1)
-
-    hcat_service_check()
-    webhcat_service_check()
-
-if __name__ == "__main__":
-  HiveServiceCheck().execute()
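
check_thrift_port_sasl is provided by Ambari's resource_management library;
stripped of the SASL handshake it performs when security is enabled, the
connectivity half of the check amounts to a plain TCP probe like the sketch
below (host and port are placeholders):

    import socket

    def can_connect(host, port, timeout=5):
        # Bare TCP reachability test; the real helper also speaks the
        # Thrift/SASL handshake on secured clusters.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(timeout)
        try:
            s.connect((host, port))
            return True
        except socket.error:
            return False
        finally:
            s.close()

    if not can_connect("hive-server.example.com", 10000):
        raise SystemExit(1)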

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/status_params.py
deleted file mode 100644
index 3e50761..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/status_params.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
-hive_pid = 'hive-server.pid'
-
-hive_metastore_pid = 'hive.pid'
-
-hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
-webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
-
-if System.get_instance().os_family in ("suse", "ubuntu"):
-  daemon_name = 'mysql'
-else:
-  daemon_name = 'mysqld'
-
-postgresql_daemon_name = "postgresql"

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat.py
deleted file mode 100644
index c6f41dd..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat.py
+++ /dev/null
@@ -1,131 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-import sys
-import os.path
-import glob
-
-
-def webhcat():
-  import params
-
-  params.HdfsDirectory(params.webhcat_apps_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=0755
-  )
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(None, action="create")
-
-  Directory(params.templeton_pid_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.templeton_log_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.config_dir,
-            recursive=True,
-            owner=params.webhcat_user,
-            group=params.user_group)
-
-  XmlConfig("webhcat-site.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['webhcat-site'],
-            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
-            owner=params.webhcat_user,
-            group=params.user_group,
-  )
-
-  File(format("{config_dir}/webhcat-env.sh"),
-       owner=params.webhcat_user,
-       group=params.user_group,
-       content=InlineTemplate(params.webhcat_env_sh_template)
-  )
-
-  if params.security_enabled:
-    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
-  else:
-    kinit_if_needed = ""
-
-  if kinit_if_needed:
-    Execute(kinit_if_needed,
-            user=params.webhcat_user,
-            path='/bin'
-    )
-
-  CopyFromLocal(params.hadoop_streeming_jars,
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_bin_dir=params.hadoop_bin_dir,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )
-
-  if (os.path.isfile(params.pig_tar_file)):
-    CopyFromLocal(params.pig_tar_file,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-  if (os.path.isfile(params.hive_tar_file)):
-    CopyFromLocal(params.hive_tar_file,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-  if (len(glob.glob(params.sqoop_tar_file)) > 0):
-    CopyFromLocal(params.sqoop_tar_file,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
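
The tarball copies above are near-identical apart from the source path and
the existence test (os.path.isfile for fixed paths, glob for the wildcarded
sqoop tarball). The shared shape, sketched outside the resource_management
DSL with shutil standing in for CopyFromLocal and illustrative paths (the
real code uploads into HDFS, not the local filesystem):

    import glob
    import os.path
    import shutil

    def copy_if_present(src, dest_dir):
        # Accept a literal path or a glob pattern; copy every match.
        for match in glob.glob(src):
            if os.path.isfile(match):
                shutil.copy(match, dest_dir)

    for tarball in ("/usr/share/webhcat/pig.tar.gz",
                    "/usr/share/webhcat/hive.tar.gz",
                    "/usr/share/webhcat/sqoop*.tar.gz"):
        copy_if_present(tarball, "/apps/webhcat")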

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_server.py
deleted file mode 100644
index 088cb41..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_server.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import sys
-from resource_management import *
-
-from webhcat import webhcat
-from webhcat_service import webhcat_service
-
-class WebHCatServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    webhcat()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    webhcat_service(action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    webhcat_service(action = 'stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.webhcat_pid_file)
-
-if __name__ == "__main__":
-  WebHCatServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service.py
deleted file mode 100644
index 41fb529..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-def webhcat_service(action='start'):
-  import params
-
-  cmd = format('env HADOOP_HOME={hadoop_home} {webhcat_bin_dir}/webhcat_server.sh')
-
-  if action == 'start':
-    daemon_cmd = format('{cmd} start')
-    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps `cat {webhcat_pid_file}` >/dev/null 2>&1')
-    Execute(daemon_cmd,
-            user=params.webhcat_user,
-            not_if=no_op_test
-    )
-  elif action == 'stop':
-    daemon_cmd = format('{cmd} stop')
-    Execute(daemon_cmd,
-            user=params.webhcat_user
-    )
-    Execute(format('rm -f {webhcat_pid_file}'))
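
The no_op_test used before starting WebHCat above is a shell one-liner: the
pid file must exist and the process it names must still be alive. The same
liveness check written directly in Python (the pid file path is an example):

    import os

    def pid_file_alive(pid_file):
        # True only if the pid file exists and its process is running.
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
        except (IOError, ValueError):
            return False
        try:
            os.kill(pid, 0)  # signal 0 probes existence without killing
            return True
        except OSError:
            return False

    print(pid_file_alive("/var/run/webhcat/webhcat.pid"))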

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service_check.py
deleted file mode 100644
index 8d15e47..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/webhcat_service_check.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-def webhcat_service_check():
-  import params
-  File(format("{tmp_dir}/templetonSmoke.sh"),
-       content= StaticFile('templetonSmoke.sh'),
-       mode=0755
-  )
-
-  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
-               " {security_param} {kinit_path_local}",
-               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
-
-  Execute(cmd,
-          tries=3,
-          try_sleep=5,
-          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-          logoutput=True)
-
-
-
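
Execute(..., tries=3, try_sleep=5) re-runs the smoke script a few times
before giving up. Outside the DSL, that retry loop looks roughly like this
(the command line is a placeholder, not the exact invocation):

    import subprocess
    import time

    def run_with_retries(cmd, tries=3, try_sleep=5):
        # Retry a flaky check before declaring failure, mirroring
        # Execute(tries=..., try_sleep=...).
        for attempt in range(1, tries + 1):
            if subprocess.call(cmd, shell=True) == 0:
                return
            if attempt < tries:
                time.sleep(try_sleep)
        raise RuntimeError("failed after %d tries: %s" % (tries, cmd))

    run_with_retries("/tmp/templetonSmoke.sh host1 ambari-qa no_keytab false /usr/bin/kinit")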

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/templates/startHiveserver2.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/templates/startHiveserver2.sh.j2 b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/templates/startHiveserver2.sh.j2
deleted file mode 100644
index 641fb32..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/templates/startHiveserver2.sh.j2
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-HIVE_SERVER2_OPTS=" -hiveconf hive.log.file=hiveserver2.log -hiveconf hive.log.dir=$5"
-{% if hive_authorization_enabled == True and str(phd_stack_version).startswith('2.1') %}
-# HiveServer 2 -hiveconf options
-HIVE_SERVER2_OPTS="${HIVE_SERVER2_OPTS} -hiveconf hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator -hiveconf hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory "
-{% endif %}
-
-HIVE_CONF_DIR=$4 {{hive_bin}}/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_OPTS} > $1 2> $2 &
-echo $!|cat>$3
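
Both start scripts in this package end with the same idiom: launch the
server in the background, redirect stdout and stderr to the paths passed as
$1 and $2, and write the background PID ($!) to the file passed as $3. A
Python equivalent of that launch-and-record step (command and paths are
placeholders):

    import subprocess

    def start_daemon(cmd, out_path, err_path, pid_path):
        # Launch in the background with captured output, then record the
        # PID, mirroring `cmd > $1 2> $2 &` followed by `echo $! > $3`.
        with open(out_path, "w") as out, open(err_path, "w") as err:
            proc = subprocess.Popen(cmd, stdout=out, stderr=err)
        with open(pid_path, "w") as f:
            f.write(str(proc.pid))

    start_daemon(["hiveserver2", "-hiveconf", "hive.metastore.uris= "],
                 "/var/log/hive/hiveserver2.out",
                 "/var/log/hive/hiveserver2.err",
                 "/var/run/hive/hiveserver2.pid")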

http://git-wip-us.apache.org/repos/asf/ambari/blob/930d4499/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/configuration/nagios-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/configuration/nagios-env.xml b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/configuration/nagios-env.xml
deleted file mode 100644
index fad8374..0000000
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/configuration/nagios-env.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <property>
-    <name>nagios_user</name>
-    <value>nagios</value>
-    <property-type>USER</property-type>
-    <description>Nagios Username.</description>
-  </property>
-  <property>
-    <name>nagios_group</name>
-    <value>nagios</value>
-    <property-type>GROUP</property-type>
-    <description>Nagios Group.</description>
-  </property>
-  <property>
-    <name>nagios_web_login</name>
-    <value>nagiosadmin</value>
-    <description>Nagios web user.</description>
-  </property>
-  <property require-input = "true">
-    <name>nagios_web_password</name>
-    <value></value>
-    <property-type>PASSWORD</property-type>
-    <description>Nagios Admin Password.</description>
-  </property>
-  <property require-input = "true">
-    <name>nagios_contact</name>
-    <value></value>
-    <description>Hadoop Admin Email.</description>
-  </property>
-
-</configuration>