Posted to commits@ambari.apache.org by ma...@apache.org on 2014/01/17 20:49:22 UTC

[07/12] AMBARI-4336. Move 1.3.4 stack to 1.3.3 using the python libraries. (mahadev)

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_namenode.py
deleted file mode 100644
index d8e191f..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_namenode.py
+++ /dev/null
@@ -1,192 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from utils import service
-from utils import hdfs_directory
-import urlparse
-
-
-def namenode(action=None, format=True):
-  import params
-
-  if action == "configure":
-    create_name_dirs(params.dfs_name_dir)
-
-  if action == "start":
-    if format:
-      format_namenode()
-      pass
-    service(
-      action="start", name="namenode", user=params.hdfs_user,
-      keytab=params.dfs_namenode_keytab_file,
-      create_pid_dir=True,
-      create_log_dir=True,
-      principal=params.dfs_namenode_kerberos_principal
-    )
-
-    # TODO: extract creating of dirs to different services
-    create_app_directories()
-    create_user_directories()
-
-  if action == "stop":
-    service(
-      action="stop", name="namenode", user=params.hdfs_user,
-      keytab=params.dfs_namenode_keytab_file,
-      principal=params.dfs_namenode_kerberos_principal
-    )
-
-  if action == "decommission":
-    decommission()
-
-def create_name_dirs(directories):
-  import params
-
-  dirs = directories.split(",")
-  Directory(dirs,
-            mode=0755,
-            owner=params.hdfs_user,
-            group=params.user_group,
-            recursive=True
-  )
-
-
-def create_app_directories():
-  import params
-
-  hdfs_directory(name="/tmp",
-                 owner=params.hdfs_user,
-                 mode="777"
-  )
-  #mapred directories
-  if params.has_jobtracker:
-    hdfs_directory(name="/mapred",
-                   owner=params.mapred_user
-    )
-    hdfs_directory(name="/mapred/system",
-                   owner=params.mapred_user
-    )
-    #hbase directories
-  if len(params.hbase_master_hosts) != 0:
-    hdfs_directory(name=params.hbase_hdfs_root_dir,
-                   owner=params.hbase_user
-    )
-    hdfs_directory(name=params.hbase_staging_dir,
-                   owner=params.hbase_user,
-                   mode="711"
-    )
-    #hive directories
-  if len(params.hive_server_host) != 0:
-    hdfs_directory(name=params.hive_apps_whs_dir,
-                   owner=params.hive_user,
-                   mode="777"
-    )
-  if len(params.hcat_server_hosts) != 0:
-    hdfs_directory(name=params.webhcat_apps_dir,
-                   owner=params.webhcat_user,
-                   mode="755"
-    )
-  if len(params.hs_host) != 0:
-    hdfs_directory(name=params.mapreduce_jobhistory_intermediate_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="777"
-    )
-
-    hdfs_directory(name=params.mapreduce_jobhistory_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="777"
-    )
-
-  pass
-
-
-def create_user_directories():
-  import params
-
-  hdfs_directory(name=params.smoke_hdfs_user_dir,
-                 owner=params.smoke_user,
-                 mode=params.smoke_hdfs_user_mode
-  )
-
-  if params.has_hive_server_host:
-    hdfs_directory(name=params.hive_hdfs_user_dir,
-                   owner=params.hive_user,
-                   mode=params.hive_hdfs_user_mode
-    )
-
-  if params.has_hcat_server_host:
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      hdfs_directory(name=params.hcat_hdfs_user_dir,
-                     owner=params.hcat_user,
-                     mode=params.hcat_hdfs_user_mode
-      )
-    hdfs_directory(name=params.webhcat_hdfs_user_dir,
-                   owner=params.webhcat_user,
-                   mode=params.webhcat_hdfs_user_mode
-    )
-
-  if params.has_oozie_server:
-    hdfs_directory(name=params.oozie_hdfs_user_dir,
-                   owner=params.oozie_user,
-                   mode=params.oozie_hdfs_user_mode
-    )
-
-
-def format_namenode(force=None):
-  import params
-
-  mark_dir = params.namenode_formatted_mark_dir
-  dfs_name_dir = params.dfs_name_dir
-  hdfs_user = params.hdfs_user
-  hadoop_conf_dir = params.hadoop_conf_dir
-
-  if True:
-    if force:
-      ExecuteHadoop('namenode -format',
-                    kinit_override=True)
-    else:
-      File('/tmp/checkForFormat.sh',
-           content=StaticFile("checkForFormat.sh"),
-           mode=0755)
-      Execute(format(
-        "sh /tmp/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {mark_dir} "
-        "{dfs_name_dir}"),
-              not_if=format("test -d {mark_dir}"),
-              path="/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin")
-    Execute(format("mkdir -p {mark_dir}"))
-
-
-def decommission():
-  import params
-
-  hdfs_user = params.hdfs_user
-  conf_dir = params.hadoop_conf_dir
-
-  File(params.exclude_file_path,
-       content=Template("exclude_hosts_list.j2"),
-       owner=hdfs_user,
-       group=params.user_group
-  )
-
-  ExecuteHadoop('dfsadmin -refreshNodes',
-                user=hdfs_user,
-                conf_dir=conf_dir,
-                kinit_override=True)
\ No newline at end of file
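
Note on the deleted format_namenode() above: formatting is doubly guarded. The
checkForFormat.sh invocation is skipped once {mark_dir} exists (not_if="test -d
{mark_dir}"), and the trailing "mkdir -p {mark_dir}" records that formatting ran,
so restarts never reformat; the "if True:" wrapper reads like a leftover from the
commented-out stack-version branching visible in params.py below. A minimal
sketch of the same marker-guard pattern in plain Python (the marker path is an
assumed expansion of {hadoop_pid_dir_prefix}/hdfs/namenode/formatted/; the
shipped checkForFormat.sh performs additional checks that this omits):

    import os
    import subprocess

    MARK_DIR = "/var/run/hadoop/hdfs/namenode/formatted/"  # assumed expansion

    def format_namenode_once():
        if os.path.isdir(MARK_DIR):
            return                    # already formatted on this host
        subprocess.check_call(["hadoop", "namenode", "-format"])
        os.makedirs(MARK_DIR)         # marker: suppress future formats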

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_snamenode.py
deleted file mode 100644
index a943455..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/hdfs_snamenode.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from utils import service
-from utils import hdfs_directory
-
-
-def snamenode(action=None, format=False):
-  import params
-
-  if action == "configure":
-    Directory(params.fs_checkpoint_dir,
-              recursive=True,
-              mode=0755,
-              owner=params.hdfs_user,
-              group=params.user_group)
-  elif action == "start":
-    service(
-      action=action,
-      name="secondarynamenode",
-      user=params.hdfs_user,
-      create_pid_dir=True,
-      create_log_dir=True,
-      keytab=params.dfs_secondary_namenode_keytab_file,
-      principal=params.dfs_secondary_namenode_kerberos_principal
-    )
-  elif action == "stop":
-    service(
-      action=action,
-      name="secondarynamenode",
-      user=params.hdfs_user,
-      create_pid_dir=True,
-      create_log_dir=True,
-      keytab=params.dfs_secondary_namenode_keytab_file,
-      principal=params.dfs_secondary_namenode_kerberos_principal
-    )
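
In the deleted snamenode() above, the "start" and "stop" branches pass identical
arguments and differ only in the action, so they collapse to a single call; an
equivalent form using only the code shown:

    if action in ("start", "stop"):
      service(
        action=action,
        name="secondarynamenode",
        user=params.hdfs_user,
        create_pid_dir=True,
        create_log_dir=True,
        keytab=params.dfs_secondary_namenode_keytab_file,
        principal=params.dfs_secondary_namenode_kerberos_principal
      )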

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/namenode.py
deleted file mode 100644
index 80700c8..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/namenode.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs_namenode import namenode
-
-
-class NameNode(Script):
-  def install(self, env):
-    import params
-
-    self.install_packages(env)
-    env.set_params(params)
-
-  def start(self, env):
-    import params
-
-    env.set_params(params)
-    self.config(env)
-    namenode(action="start")
-
-  def stop(self, env):
-    import params
-
-    env.set_params(params)
-    namenode(action="stop")
-
-  def config(self, env):
-    import params
-
-    env.set_params(params)
-    namenode(action="configure")
-    pass
-
-  def status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    check_process_status(status_params.namenode_pid_file)
-    pass
-
-  def decommission(self, env):
-    import params
-
-    env.set_params(params)
-    namenode(action="decommission")
-    pass
-
-if __name__ == "__main__":
-  NameNode().execute()
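
The Script subclass above follows the stack-script contract: the agent calls
NameNode().execute(), which dispatches the requested command to the method of
the same name, and the "import params" inside each method defers config
evaluation until Script.get_config() has been populated. The start path,
roughly:

    NameNode().execute()              # agent entry point, dispatch by command
      -> start(env)
         -> self.config(env)          # namenode(action="configure"): dfs.name.dir
         -> namenode(action="start")  # format guard, daemon start, app/user dirs

Note that namenode() defaults format=True, so every plain start goes through the
(marker-guarded) format check.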

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/params.py
deleted file mode 100644
index 3e0e65b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/params.py
+++ /dev/null
@@ -1,165 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import status_params
-import os
-
-config = Script.get_config()
-
-#security params
-security_enabled = config['configurations']['global']['security_enabled']
-dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_secondary_namenode_keytab_file =  config['configurations']['hdfs-site']['dfs.secondary.namenode.keytab.file']
-dfs_datanode_keytab_file =  config['configurations']['hdfs-site']['dfs.datanode.keytab.file']
-dfs_namenode_keytab_file =  config['configurations']['hdfs-site']['dfs.namenode.keytab.file']
-smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
-hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
-
-dfs_datanode_kerberos_principal = config['configurations']['hdfs-site']['dfs.datanode.kerberos.principal']
-dfs_journalnode_kerberos_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.principal']
-dfs_secondary_namenode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.internal.spnego.principal']
-dfs_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.namenode.kerberos.principal']
-dfs_web_authentication_kerberos_principal = config['configurations']['hdfs-site']['dfs.web.authentication.kerberos.principal']
-dfs_secondary_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.principal']
-dfs_journalnode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.internal.spnego.principal']
-
-#exclude file
-hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
-exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
-
-kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-#hosts
-hostname = config["hostname"]
-rm_host = default("/clusterHostInfo/rm_host", [])
-slave_hosts = default("/clusterHostInfo/slave_hosts", [])
-hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
-oozie_servers = default("/clusterHostInfo/oozie_server", [])
-hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
-hive_server_host =  default("/clusterHostInfo/hive_server_host", [])
-hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
-hs_host = default("/clusterHostInfo/hs_host", [])
-jtnode_host = default("/clusterHostInfo/jtnode_host", [])
-namenode_host = default("/clusterHostInfo/namenode_host", [])
-nm_host = default("/clusterHostInfo/nm_host", [])
-ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
-journalnode_hosts = default("/clusterHostInfo/journalnode_hosts", [])
-zkfc_hosts = default("/clusterHostInfo/zkfc_hosts", [])
-
-has_ganglia_server = not len(ganglia_server_hosts) == 0
-has_namenodes = not len(namenode_host) == 0
-has_jobtracker = not len(jtnode_host) == 0
-has_resourcemanager = not len(rm_host) == 0
-has_histroryserver = not len(hs_host) == 0
-has_hbase_masters = not len(hbase_master_hosts) == 0
-has_slaves = not len(slave_hosts) == 0
-has_nagios = not len(hagios_server_hosts) == 0
-has_oozie_server = not len(oozie_servers)  == 0
-has_hcat_server_host = not len(hcat_server_hosts)  == 0
-has_hive_server_host = not len(hive_server_host)  == 0
-has_journalnode_hosts = not len(journalnode_hosts)  == 0
-has_zkfc_hosts = not len(zkfc_hosts)  == 0
-
-
-is_namenode_master = hostname in namenode_host
-is_jtnode_master = hostname in jtnode_host
-is_rmnode_master = hostname in rm_host
-is_hsnode_master = hostname in hs_host
-is_hbase_master = hostname in hbase_master_hosts
-is_slave = hostname in slave_hosts
-
-if has_ganglia_server:
-  ganglia_server_host = ganglia_server_hosts[0]
-
-#users and groups
-yarn_user = config['configurations']['global']['yarn_user']
-hbase_user = config['configurations']['global']['hbase_user']
-nagios_user = config['configurations']['global']['nagios_user']
-oozie_user = config['configurations']['global']['oozie_user']
-webhcat_user = config['configurations']['global']['hcat_user']
-hcat_user = config['configurations']['global']['hcat_user']
-hive_user = config['configurations']['global']['hive_user']
-smoke_user =  config['configurations']['global']['smokeuser']
-mapred_user = config['configurations']['global']['mapred_user']
-hdfs_user = status_params.hdfs_user
-
-user_group = config['configurations']['global']['user_group']
-proxyuser_group =  config['configurations']['global']['proxyuser_group']
-nagios_group = config['configurations']['global']['nagios_group']
-smoke_user_group = "users"
-
-#hadoop params
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
-hadoop_bin = "/usr/lib/hadoop/bin"
-
-hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
-
-dfs_domain_socket_path = "/var/lib/hadoop-hdfs/dn_socket"
-dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)
-
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-
-jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']#"/grid/0/hdfs/journal"
-
-# if stack_version[0] == "2":
-#dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
-# else:
-dfs_name_dir = config['configurations']['hdfs-site']['dfs.name.dir']#","/tmp/hadoop-hdfs/dfs/name")
-
-namenode_dirs_created_stub_dir = format("{hdfs_log_dir_prefix}/{hdfs_user}")
-namenode_dirs_stub_filename = "namenode_dirs_created"
-
-hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']#","/apps/hbase/data")
-hbase_staging_dir = "/apps/hbase/staging"
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"] #, "/apps/hive/warehouse")
-webhcat_apps_dir = "/apps/webhcat"
-mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']#","/app-logs")
-mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']#","/mr-history/done")
-
-if has_oozie_server:
-  oozie_hdfs_user_dir = format("/user/{oozie_user}")
-  oozie_hdfs_user_mode = 775
-if has_hcat_server_host:
-  hcat_hdfs_user_dir = format("/user/{hcat_user}")
-  hcat_hdfs_user_mode = 755
-  webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-  webhcat_hdfs_user_mode = 755
-if has_hive_server_host:
-  hive_hdfs_user_dir = format("/user/{hive_user}")
-  hive_hdfs_user_mode = 700
-smoke_hdfs_user_dir = format("/user/{smoke_user}")
-smoke_hdfs_user_mode = 770
-
-namenode_formatted_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
-
-# if stack_version[0] == "2":
-#fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'] #","/tmp/hadoop-hdfs/dfs/namesecondary")
-# else:
-fs_checkpoint_dir = config['configurations']['core-site']['fs.checkpoint.dir']#","/tmp/hadoop-hdfs/dfs/namesecondary")
-
-# if stack_version[0] == "2":
-#dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
-# else:
-dfs_data_dir = config['configurations']['hdfs-site']['dfs.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
-
-
-
-
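
Two lookup styles coexist in the deleted params.py: default("/clusterHostInfo/...",
[]) tolerates a missing path and returns the fallback, while direct indexing into
config['configurations'] raises on absent keys, so required site properties fail
fast. Assuming those resource_management semantics:

    security_enabled = config['configurations']['global']['security_enabled']  # KeyError if missing
    rm_host = default("/clusterHostInfo/rm_host", [])                          # [] when absent

The "has_* = not len(x) == 0" flags are equivalent to bool(x), and the HDFS
directory modes are strings or bare ints (mode="777", oozie_hdfs_user_mode = 775)
because they end up interpolated into "fs -chmod" command lines rather than being
passed as Python octals.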

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/service_check.py
deleted file mode 100644
index 5cd264b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/service_check.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-
-class HdfsServiceCheck(Script):
-  def service_check(self, env):
-    import params
-
-    env.set_params(params)
-    unique = get_unique_id_and_date()
-    dir = '/tmp'
-    tmp_file = format("{dir}/{unique}")
-
-    safemode_command = "dfsadmin -safemode get | grep OFF"
-
-    create_dir_cmd = format("fs -mkdir {dir} ; hadoop fs -chmod -R 777 {dir}")
-    test_dir_exists = format("hadoop fs -test -e {dir}")
-    cleanup_cmd = format("fs -rm {tmp_file}")
-    #cleanup put below to handle retries; if retrying there wil be a stale file
-    #that needs cleanup; exit code is fn of second command
-    create_file_cmd = format(
-      "{cleanup_cmd}; hadoop fs -put /etc/passwd {tmp_file}")
-    test_cmd = format("fs -test -e {tmp_file}")
-    if params.security_enabled:
-      Execute(format(
-        "su - {smoke_user} -c '{kinit_path_local} -kt {smoke_user_keytab} "
-        "{smoke_user}'"))
-    ExecuteHadoop(safemode_command,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=15,
-                  tries=20
-    )
-    ExecuteHadoop(create_dir_cmd,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  not_if=test_dir_exists,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5
-    )
-    ExecuteHadoop(create_file_cmd,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5
-    )
-    ExecuteHadoop(test_cmd,
-                  user=params.smoke_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5
-    )
-    if params.has_journalnode_hosts:
-      journalnode_port = params.journalnode_port
-      smoke_test_user = params.smoke_user
-      checkWebUIFileName = "checkWebUI.py"
-      checkWebUIFilePath = format("/tmp/{checkWebUIFileName}")
-      comma_sep_jn_hosts = ",".join(params.journalnode_hosts)
-      checkWebUICmd = format(
-        "su - {smoke_test_user} -c 'python {checkWebUIFilePath} -m "
-        "{comma_sep_jn_hosts} -p {journalnode_port}'")
-      File(checkWebUIFilePath,
-           content=StaticFile(checkWebUIFileName))
-
-      Execute(checkWebUICmd,
-              logoutput=True,
-              try_sleep=3,
-              tries=5
-      )
-
-    if params.has_zkfc_hosts:
-      pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
-      pid_file = format("{pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
-      check_zkfc_process_cmd = format(
-        "ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
-      Execute(check_zkfc_process_cmd,
-              logoutput=True,
-              try_sleep=3,
-              tries=5
-      )
-
-
-if __name__ == "__main__":
-  HdfsServiceCheck().execute()
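
The deleted service check runs its steps in a fixed order, each with its own
retry budget (tries/try_sleep) so a freshly started cluster has time to settle:

    1. dfsadmin -safemode get | grep OFF      # up to 20 tries x 15s for the NN to leave safemode
    2. fs -mkdir /tmp; fs -chmod -R 777 /tmp  # skipped when /tmp already exists (not_if)
    3. fs -rm <tmp_file>; fs -put /etc/passwd <tmp_file>  # rm first so retries don't collide
    4. fs -test -e <tmp_file>                 # the actual read-back assertion

The journalnode and ZKFC checks only run when the corresponding hosts exist in
the cluster topology (has_journalnode_hosts / has_zkfc_hosts from params.py).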

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/snamenode.py
deleted file mode 100644
index 8f682ec..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/snamenode.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs_snamenode import snamenode
-
-
-class SNameNode(Script):
-  def install(self, env):
-    import params
-
-    env.set_params(params)
-
-    self.install_packages(env)
-
-
-  def start(self, env):
-    import params
-
-    env.set_params(params)
-
-    self.config(env)
-    snamenode(action="start")
-
-  def stop(self, env):
-    import params
-
-    env.set_params(params)
-
-    snamenode(action="stop")
-
-  def config(self, env):
-    import params
-
-    env.set_params(params)
-
-    snamenode(action="configure")
-
-  def status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-
-    check_process_status(status_params.snamenode_pid_file)
-
-
-if __name__ == "__main__":
-  SNameNode().execute()
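
status() above delegates to check_process_status() with the pid file from
status_params.py. An assumed equivalent of that check, for reference (the real
helper raises rather than returning False):

    import os

    def process_alive(pid_file):
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)       # signal 0: probe only, nothing delivered
            return True
        except (IOError, ValueError, OSError):
            return False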

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/status_params.py
deleted file mode 100644
index 4097373..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/status_params.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
-hdfs_user = config['configurations']['global']['hdfs_user']
-hdp_pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
-datanode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-datanode.pid")
-namenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-namenode.pid")
-snamenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-secondarynamenode.pid")
-journalnode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-journalnode.pid")
-zkfc_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
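
All pid paths above follow the hadoop-daemon.sh convention
{prefix}/{user}/hadoop-{user}-{component}.pid; e.g. with
hadoop_pid_dir_prefix=/var/run/hadoop and hdfs_user=hdfs:

    namenode_pid_file  ->  /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid

which is exactly the file that service() in utils.py polls in its not_if guard.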

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/utils.py
deleted file mode 100644
index e28d0e9..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/scripts/utils.py
+++ /dev/null
@@ -1,133 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-
-def service(action=None, name=None, user=None, create_pid_dir=False,
-            create_log_dir=False, keytab=None, principal=None):
-  import params
-
-  kinit_cmd = "true"
-  pid_dir = format("{hadoop_pid_dir_prefix}/{user}")
-  pid_file = format("{pid_dir}/hadoop-{user}-{name}.pid")
-  log_dir = format("{hdfs_log_dir_prefix}/{user}")
-  hadoop_daemon = format(
-    "export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && "
-    "{hadoop_bin}/hadoop-daemon.sh")
-  cmd = format("{hadoop_daemon} --config {hadoop_conf_dir}")
-
-  if create_pid_dir:
-    Directory(pid_dir,
-              owner=user,
-              recursive=True)
-  if create_log_dir:
-    Directory(log_dir,
-              owner=user,
-              recursive=True)
-
-  if params.security_enabled:
-    principal_replaced = principal.replace("_HOST", params.hostname)
-    kinit_cmd = format("kinit -kt {keytab} {principal_replaced}")
-
-    if name == "datanode":
-      user = "root"
-      pid_file = format(
-        "{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid")
-
-  daemon_cmd = format("{cmd} {action} {name}")
-
-  service_is_up = format(
-    "ls {pid_file} >/dev/null 2>&1 &&"
-    " ps `cat {pid_file}` >/dev/null 2>&1") if action == "start" else None
-
-  Execute(kinit_cmd)
-  Execute(daemon_cmd,
-          user = user,
-          not_if=service_is_up
-  )
-  if action == "stop":
-    File(pid_file,
-         action="delete",
-         ignore_failures=True
-    )
-
-
-def hdfs_directory(name=None, owner=None, group=None,
-                   mode=None, recursive_chown=False, recursive_chmod=False):
-  import params
-
-  dir_exists = format("hadoop fs -ls {name} >/dev/null 2>&1")
-  namenode_safe_mode_off = "hadoop dfsadmin -safemode get|grep 'Safe mode is OFF'"
-
-  stub_dir = params.namenode_dirs_created_stub_dir
-  stub_filename = params.namenode_dirs_stub_filename
-  dir_absent_in_stub = format(
-    "grep -q '^{name}$' {stub_dir}/{stub_filename} > /dev/null 2>&1; test $? -ne 0")
-  record_dir_in_stub = format("echo '{name}' >> {stub_dir}/{stub_filename}")
-  tries = 3
-  try_sleep = 10
-  dfs_check_nn_status_cmd = "true"
-
-  #if params.stack_version[0] == "2":
-  #mkdir_cmd = format("fs -mkdir -p {name}")
-  #else:
-  mkdir_cmd = format("fs -mkdir {name}")
-
-  if params.security_enabled:
-    Execute(format("kinit -kt {hdfs_user_keytab} {hdfs_user}"),
-            user = params.hdfs_user)
-  ExecuteHadoop(mkdir_cmd,
-                try_sleep=try_sleep,
-                tries=tries,
-                not_if=format(
-                  "{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
-                  "{dir_exists} && ! {namenode_safe_mode_off}"),
-                only_if=format(
-                  "su - hdfs -c '{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
-                  "! {dir_exists}'"),
-                conf_dir=params.hadoop_conf_dir,
-                user=params.hdfs_user
-  )
-  Execute(record_dir_in_stub,
-          user=params.hdfs_user,
-          only_if=format("! {dir_absent_in_stub}")
-  )
-
-  recursive = "-R" if recursive_chown else ""
-  perm_cmds = []
-
-  if owner:
-    chown = owner
-    if group:
-      chown = format("{owner}:{group}")
-    perm_cmds.append(format("fs -chown {recursive} {chown} {name}"))
-  if mode:
-    perm_cmds.append(format("fs -chmod {recursive} {mode} {name}"))
-  for cmd in perm_cmds:
-    ExecuteHadoop(cmd,
-                  user=params.hdfs_user,
-                  only_if=format("su - hdfs -c '{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && {namenode_safe_mode_off} && {dir_exists}'"),
-                  try_sleep=try_sleep,
-                  tries=tries,
-                  conf_dir=params.hadoop_conf_dir
-    )
-
-
-
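
hdfs_directory() above is made idempotent by a stub file rather than by querying
HDFS every time: each created path is appended to
{stub_dir}/namenode_dirs_created, and both the mkdir and the chown/chmod steps
are gated on the path being absent from that file (plus the namenode being out
of safemode for permission changes). A sketch of the stub bookkeeping, using the
names defined in params.py above:

    stub = "%s/%s" % (params.namenode_dirs_created_stub_dir,
                      params.namenode_dirs_stub_filename)

    def already_created(name):
        # mirrors: grep -q '^{name}$' {stub_dir}/{stub_filename}
        try:
            with open(stub) as f:
                return any(line.strip() == name for line in f)
        except IOError:
            return False

Also note the security path in service(): a secured datanode is started as root
(it must bind privileged ports) while its pid file stays under the hdfs user.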

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/templates/exclude_hosts_list.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/templates/exclude_hosts_list.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/templates/exclude_hosts_list.j2
deleted file mode 100644
index c3af46e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HDFS/package/templates/exclude_hosts_list.j2
+++ /dev/null
@@ -1,3 +0,0 @@
-{% for host in hdfs_exclude_file %}
-{{host}}
-{% endfor %}
\ No newline at end of file
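
This template renders one decommission candidate per line; with
hdfs_exclude_file = ['host1.example.com', 'host2.example.com'] the file written
to dfs.hosts.exclude becomes:

    host1.example.com
    host2.example.com

which decommission() in hdfs_namenode.py then applies via "dfsadmin -refreshNodes".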

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/global.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/global.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/global.xml
deleted file mode 100644
index ae7f586..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/global.xml
+++ /dev/null
@@ -1,148 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <property>
-    <name>hivemetastore_host</name>
-    <value></value>
-    <description>Hive Metastore host.</description>
-  </property>
-  <property>
-    <name>hive_database</name>
-    <value></value>
-    <description>Hive database name.</description>
-  </property>
-  <property>
-    <name>hive_existing_mysql_database</name>
-    <value></value>
-    <description>Hive database name.</description>
-  </property>
-  <property>
-    <name>hive_existing_mysql_host</name>
-    <value></value>
-    <description></description>
-  </property>
-  <property>
-    <name>hive_existing_oracle_database</name>
-    <value></value>
-    <description>Hive database name.</description>
-  </property>
-  <property>
-    <name>hive_existing_oracle_host</name>
-    <value></value>
-    <description></description>
-  </property>
-  <property>
-    <name>hive_ambari_database</name>
-    <value>MySQL</value>
-    <description>Database type.</description>
-  </property>  
-  <property>
-    <name>hive_ambari_host</name>
-    <value></value>
-    <description>Database hostname.</description>
-  </property>
-  <property>
-    <name>hive_database_name</name>
-    <value></value>
-    <description>Database hname</description>
-  </property>    
-  <property>
-    <name>hive_metastore_user_name</name>
-    <value>hive</value>
-    <description>Database username to use to connect to the database.</description>
-  </property>    
-  <property>
-    <name>hive_metastore_user_passwd</name>
-    <value></value>
-    <description>Database password to use to connect to the database.</description>
-  </property>    
-  <property>
-    <name>hive_metastore_port</name>
-    <value>9083</value>
-    <description>Hive Metastore port.</description>
-  </property>    
-  <property>
-    <name>hive_lib</name>
-    <value>/usr/lib/hive/lib/</value>
-    <description>Hive Library.</description>
-  </property>    
-  <property>
-    <name>hive_dbroot</name>
-    <value>/usr/lib/hive/lib/</value>
-    <description>Hive DB Directory.</description>
-  </property>      
-  <property>
-    <name>hive_conf_dir</name>
-    <value>/etc/hive/conf</value>
-    <description>Hive Conf Dir.</description>
-  </property>
-  <property>
-    <name>hive_log_dir</name>
-    <value>/var/log/hive</value>
-    <description>Directory for Hive Log files.</description>
-  </property>
-  <property>
-    <name>hive_pid_dir</name>
-    <value>/var/run/hive</value>
-    <description>Hive PID Dir.</description>
-  </property>
-  <property>
-    <name>mysql_connector_url</name>
-    <value>${download_url}/mysql-connector-java-5.1.18.zip</value>
-    <description>Hive PID Dir.</description>
-  </property>
-  <property>
-    <name>hive_aux_jars_path</name>
-    <value>/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar</value>
-    <description>Hive auxiliary jar path.</description>
-  </property>
-  <property>
-    <name>hive_user</name>
-    <value>hive</value>
-    <description>Hive User.</description>
-  </property>
-
-  <!--HCAT-->
-
-  <property>
-    <name>hcat_log_dir</name>
-    <value>/var/log/webhcat</value>
-    <description>WebHCat Log Dir.</description>
-  </property>
-  <property>
-    <name>hcat_pid_dir</name>
-    <value>/etc/run/webhcat</value>
-    <description>WebHCat Pid Dir.</description>
-  </property>
-  <property>
-    <name>hcat_user</name>
-    <value>hcat</value>
-    <description>HCat User.</description>
-  </property>
-  <property>
-    <name>webhcat_user</name>
-    <value>hcat</value>
-    <description>WebHCat User.</description>
-  </property>
-  
-</configuration>
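
These global properties are the same keys the stack scripts read through
config['configurations']['global'][...] (see params.py above); hive_user,
hcat_user and webhcat_user in particular feed the HDFS user-directory creation
in hdfs_namenode.py. Two values look like copy/paste slips: hcat_pid_dir
defaults to /etc/run/webhcat where /var/run/webhcat was presumably intended (the
log dir uses /var/log/webhcat), and the mysql_connector_url description repeats
"Hive PID Dir.".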

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/hive-site.xml
deleted file mode 100644
index 29ed54e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/configuration/hive-site.xml
+++ /dev/null
@@ -1,236 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<configuration>
-  <property>
-    <name>ambari.hive.db.schema.name</name>
-    <value>hive</value>
-    <description>Database name used as the Hive Metastore</description>
-  </property>
-
-  <property>
-    <name>javax.jdo.option.ConnectionURL</name>
-    <value>jdbc</value>
-    <description>JDBC connect string for a JDBC metastore</description>
-  </property>
-
-  <property>
-    <name>javax.jdo.option.ConnectionDriverName</name>
-    <value>com.mysql.jdbc.Driver</value>
-    <description>Driver class name for a JDBC metastore</description>
-  </property>
-
-  <property>
-    <name>javax.jdo.option.ConnectionUserName</name>
-    <value>hive</value>
-    <description>username to use against metastore database</description>
-  </property>
-
-  <property>
-    <name>javax.jdo.option.ConnectionPassword</name>
-    <value> </value>
-    <description>password to use against metastore database</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.warehouse.dir</name>
-    <value>/apps/hive/warehouse</value>
-    <description>location of default database for the warehouse</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.sasl.enabled</name>
-    <value></value>
-    <description>If true, the metastore thrift interface will be secured with SASL.
-      Clients must authenticate with Kerberos.</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.kerberos.keytab.file</name>
-    <value></value>
-    <description>The path to the Kerberos Keytab file containing the metastore
-      thrift server's service principal.</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.kerberos.principal</name>
-    <value></value>
-    <description>The service principal for the metastore thrift server. The special
-      string _HOST will be replaced automatically with the correct host name.</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.cache.pinobjtypes</name>
-    <value>Table,Database,Type,FieldSchema,Order</value>
-    <description>List of comma separated metastore object types that should be pinned in the cache</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.uris</name>
-    <value>thrift://localhost:9083</value>
-    <description>URI for client to contact metastore server</description>
-  </property>
-
-  <property>
-    <name>hive.semantic.analyzer.factory.impl</name>
-    <value>org.apache.hivealog.cli.HCatSemanticAnalyzerFactory</value>
-    <description>controls which SemanticAnalyzerFactory implemenation class is used by CLI</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.client.socket.timeout</name>
-    <value>60</value>
-    <description>MetaStore Client socket timeout in seconds</description>
-  </property>
-
-  <property>
-    <name>hive.metastore.execute.setugi</name>
-    <value>true</value>
-    <description>In unsecure mode, setting this property to true will cause the metastore to execute DFS operations using the client's reported user and group permissions. Note that this property must be set on both the client and     server sides. Further note that its best effort. If client sets its to true and server sets it to false, client setting will be ignored.</description>
-  </property>
-
-  <property>
-    <name>hive.security.authorization.enabled</name>
-    <value>false</value>
-    <description>enable or disable the hive client authorization</description>
-  </property>
-
-  <property>
-    <name>hive.security.authorization.manager</name>
-    <value>org.apache.hcatalog.security.HdfsAuthorizationProvider</value>
-    <description>the hive client authorization manager class name.
-      The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider.  </description>
-  </property>
-
-  <property>
-    <name>hive.server2.enable.doAs</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.hdfs.impl.disable.cache</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.file.impl.disable.cache</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>hive.enforce.bucketing</name>
-    <value>true</value>
-    <description>Whether bucketing is enforced. If true, while inserting into the table, bucketing is enforced.</description>
-  </property>
-
-  <property>
-    <name>hive.enforce.sorting</name>
-    <value>true</value>
-    <description>Whether sorting is enforced. If true, while inserting into the table, sorting is enforced.</description>
-  </property>
-
-  <property>
-    <name>hive.map.aggr</name>
-    <value>true</value>
-    <description>Whether to use map-side aggregation in Hive Group By queries.</description>
-  </property>
-
-  <property>
-    <name>hive.optimize.bucketmapjoin</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>hive.optimize.bucketmapjoin.sortedmerge</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>hive.mapred.reduce.tasks.speculative.execution</name>
-    <value>false</value>
-    <description>Whether speculative execution for reducers should be turned on.</description>
-  </property>
-
-  <property>
-    <name>hive.auto.convert.join</name>
-    <value>true</value>
-    <description>Whether Hive enable the optimization about converting common
-      join into mapjoin based on the input file size.</description>
-  </property>
-
-  <property>
-    <name>hive.auto.convert.sortmerge.join</name>
-    <value>true</value>
-    <description>Will the join be automatically converted to a sort-merge join, if the joined tables pass
-      the criteria for sort-merge join.
-    </description>
-  </property>
-
-  <property>
-    <name>hive.auto.convert.sortmerge.join.noconditionaltask</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>hive.auto.convert.join.noconditionaltask</name>
-    <value>true</value>
-    <description>Whether Hive enable the optimization about converting common join into mapjoin based on the input file
-      size. If this paramater is on, and the sum of size for n-1 of the tables/partitions for a n-way join is smaller than the
-      specified size, the join is directly converted to a mapjoin (there is no conditional task).
-    </description>
-  </property>
-
-  <property>
-    <name>hive.auto.convert.join.noconditionaltask.size</name>
-    <value>1000000000</value>
-    <description>If hive.auto.convert.join.noconditionaltask is off, this parameter does not take affect. However, if it
-      is on, and the sum of size for n-1 of the tables/partitions for a n-way join is smaller than this size, the join is directly
-      converted to a mapjoin(there is no conditional task). The default is 10MB.
-    </description>
-  </property>
-
-  <property>
-    <name>hive.optimize.reducededuplication.min.reducer</name>
-    <value>1</value>
-    <description>Reduce deduplication merges two RSs by moving key/parts/reducer-num of the child RS to parent RS.
-      That means if reducer-num of the child RS is fixed (order by or forced bucketing) and small, it can make very slow, single MR.
-      The optimization will be disabled if number of reducers is less than specified value.
-    </description>
-  </property>
-
-  <property>
-    <name>hive.optimize.mapjoin.mapreduce</name>
-    <value>true</value>
-    <description>If hive.auto.convert.join is off, this parameter does not take
-      affect. If it is on, and if there are map-join jobs followed by a map-reduce
-      job (for e.g a group by), each map-only job is merged with the following
-      map-reduce job.
-    </description>
-  </property>
-
-  <property>
-    <name>hive.mapjoin.bucket.cache.size</name>
-    <value>10000</value>
-    <description>
-      Size per reducer.The default is 1G, i.e if the input size is 10G, it
-      will use 10 reducers.
-    </description>
-  </property>
-
-</configuration>
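
Most descriptions here are copied from upstream Hive, and two visible slips
carry over: hive.semantic.analyzer.factory.impl names org.apache.hivealog.cli...
("hivealog" where the authorization manager below spells "hcatalog"), and the
hive.mapjoin.bucket.cache.size description ("Size per reducer. The default is
1G...") appears to belong to hive.exec.reducers.bytes.per.reducer. The
placeholder values (ConnectionURL "jdbc", the empty Kerberos properties) are
filled in at deploy time from the global/cluster configuration.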

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/metainfo.xml
deleted file mode 100644
index 0a0f8fa..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/metainfo.xml
+++ /dev/null
@@ -1,186 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>HIVE</name>
-      <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
-
-        <component>
-          <name>HIVE_METASTORE</name>
-          <category>MASTER</category>
-          <!-- may be 0 if specifying external metastore, how to specify this? -->
-          <cardinality>1</cardinality>
-          <auto-deploy>
-            <enabled>true</enabled>
-            <co-locate>HIVE/HIVE_SERVER</co-locate>
-          </auto-deploy>
-          <commandScript>
-            <script>scripts/hive_metastore.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-
-        <component>
-          <name>HIVE_SERVER</name>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>HIVE/HIVE_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/hive_server.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-        </component>
-
-        <component>
-          <name>MYSQL_SERVER</name>
-          <category>MASTER</category>
-          <!-- may be 0 if specifying external db, how to specify this? -->
-          <cardinality>1</cardinality>
-          <auto-deploy>
-            <enabled>true</enabled>
-            <co-locate>HIVE/HIVE_SERVER</co-locate>
-          </auto-deploy>
-          <commandScript>
-            <script>scripts/mysql_server.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-        </component>
-
-        <component>
-          <name>HIVE_CLIENT</name>
-          <category>CLIENT</category>
-          <cardinality>0+</cardinality>
-          <commandScript>
-            <script>scripts/hive_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-        </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osType>any</osType>
-          <packages>
-            <package>
-              <type>rpm</type>
-              <name>hive</name>
-            </package>
-            <package>
-              <type>rpm</type>
-              <name>mysql-connector-java</name>
-            </package>
-            <package>
-              <type>rpm</type>
-              <name>mysql</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osType>centos6</osType>
-          <packages>
-            <package>
-              <type>rpm</type>
-              <name>mysql-server</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osType>centos5</osType>
-          <packages>
-            <package>
-              <type>rpm</type>
-              <name>mysql-server</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osType>suse</osType>
-          <packages>
-            <package>
-              <type>rpm</type>
-              <name>mysql-client</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>global</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
-        <component>
-          <name>HCAT</name>
-          <category>CLIENT</category>
-          <commandScript>
-            <script>scripts/hcat_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osType>any</osType>
-          <packages>
-            <package>
-              <type>rpm</type>
-              <name>hcatalog</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <configuration-dependencies>
-        <config-type>global</config-type>
-      </configuration-dependencies>
-
-    </service>
-
-  </services>
-</metainfo>
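
This schemaVersion 2.0 metainfo is what binds the service to the Python scripts
deleted above: each <commandScript> names the module whose Script subclass the
agent executes for that component, while <cardinality>, <auto-deploy> and
<co-locate>HIVE/HIVE_SERVER</co-locate> drive where the metastore and MySQL
server land. The osSpecific blocks layer distro-specific packages (mysql-server
on centos5/6, mysql-client on suse) on top of the "any" set.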

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/addMysqlUser.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/addMysqlUser.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/addMysqlUser.sh
deleted file mode 100644
index 8d31b91..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/addMysqlUser.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
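-# Usage: addMysqlUser.sh MYSQLD_SERVICE DB_USER DB_PASSWORD DB_HOST
-# Starts mysqld, creates the metastore user for the given host, for
-# localhost, and (when missing) for this host's FQDN, then stops mysqld.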
-mysqldservice=$1
-mysqldbuser=$2
-mysqldbpasswd=$3
-mysqldbhost=$4
-myhostname=$(hostname -f)
-
-service $mysqldservice start
-echo "Adding user $mysqldbuser@$mysqldbhost and $mysqldbuser@localhost"
-mysql -u root -e "CREATE USER '$mysqldbuser'@'$mysqldbhost' IDENTIFIED BY '$mysqldbpasswd';"
-mysql -u root -e "CREATE USER '$mysqldbuser'@'localhost' IDENTIFIED BY '$mysqldbpasswd';"
-mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$mysqldbhost';"
-mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'localhost';"
-# Only create the FQDN-scoped user when it does not already exist
-if [ -z "$(mysql -u root -e "select user from mysql.user where user='$mysqldbuser' and host='$myhostname'" | grep "$mysqldbuser")" ]; then
-  echo "Adding user $mysqldbuser@$myhostname"
-  mysql -u root -e "CREATE USER '$mysqldbuser'@'$myhostname' IDENTIFIED BY '$mysqldbpasswd';"
-  mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$myhostname';"
-fi
-mysql -u root -e "flush privileges;"
-service $mysqldservice stop

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hcatSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hcatSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hcatSmoke.sh
deleted file mode 100644
index 9e7b33f..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hcatSmoke.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
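-# Usage: hcatSmoke.sh TABLE_NAME prepare|cleanup
-# "prepare" creates a small RCFile-backed test table; "cleanup" drops it.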
-export tablename=$1
-
-case "$2" in
-
-prepare)
-  hcat -e "show tables"
-  hcat -e "drop table IF EXISTS ${tablename}"
-  hcat -e "create table ${tablename} ( id INT, name string ) stored as rcfile ;"
-;;
-
-cleanup)
-  hcat -e "drop table IF EXISTS ${tablename}"
-;;
-
-esac

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveSmoke.sh
deleted file mode 100644
index 7e03524..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveSmoke.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
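-# Usage: hiveSmoke.sh TABLE_NAME
-# Creates an external smoke-test table in Hive and describes it to verify
-# that the Hive client can reach the metastore.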
-export tablename=$1
-echo "CREATE EXTERNAL TABLE IF NOT EXISTS ${tablename} ( foo INT, bar STRING );" | hive
-echo "DESCRIBE ${tablename};" | hive

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2.sql b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2.sql
deleted file mode 100644
index 99a3865..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2.sql
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-CREATE EXTERNAL TABLE IF NOT EXISTS hiveserver2smoke20408 ( foo INT, bar STRING );
-DESCRIBE hiveserver2smoke20408;

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2Smoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2Smoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2Smoke.sh
deleted file mode 100644
index 051a21e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/hiveserver2Smoke.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
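-# Usage: hiveserver2Smoke.sh JDBC_URL SQL_FILE
-# Runs the given SQL file against HiveServer2 through beeline with dummy
-# credentials and fails if the output contains "Error".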
-smokeout=$(/usr/lib/hive/bin/beeline -u "$1" -n fakeuser -p fakepwd -d org.apache.hive.jdbc.HiveDriver -e "!run $2" 2>&1 | grep Error)
-
-if [ -z "$smokeout" ]; then
-  echo "Smoke test of hiveserver2 passed"
-  exit 0
-else
-  echo "Smoke test of hiveserver2 failed"
-  echo "$smokeout"
-  exit 1
-fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/pigSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/pigSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/pigSmoke.sh
deleted file mode 100644
index 2e90ac0..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/pigSmoke.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-
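-# Pig smoke test: load a ':'-delimited 'passwd' file, project the first
-# field as id, and store the result in 'pigsmoke.out'.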
-A = load 'passwd' using PigStorage(':');
-B = foreach A generate \$0 as id;
-store B into 'pigsmoke.out';

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startHiveserver2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startHiveserver2.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startHiveserver2.sh
deleted file mode 100644
index fa90c2f..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startHiveserver2.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
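-# Usage: startHiveserver2.sh OUT_FILE ERR_FILE PID_FILE CONF_DIR
-# Launches HiveServer2 in the background and writes its pid to PID_FILE.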
-HIVE_CONF_DIR=$4 /usr/lib/hive/bin/hiveserver2 -hiveconf hive.metastore.uris=' ' > $1 2> $2 &
-echo $! > $3

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startMetastore.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startMetastore.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startMetastore.sh
deleted file mode 100644
index 9350776..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/files/startMetastore.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
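-# Usage: startMetastore.sh OUT_FILE ERR_FILE PID_FILE CONF_DIR
-# Launches the Hive metastore in the background and writes its pid to PID_FILE.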
-HIVE_CONF_DIR=$4 hive --service metastore > $1 2> $2 &
-echo $! > $3

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/__init__.py
deleted file mode 100644
index 5561e10..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat.py
deleted file mode 100644
index 2993d3a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-
-
-def hcat():
-  import params
-
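-  # Lay down the HCatalog conf and pid directories, then render
-  # hcat-env.sh from its template.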
-  Directory(params.hcat_conf_dir,
-            owner=params.hcat_user,
-            group=params.user_group,
-  )
-
-  Directory(params.hcat_pid_dir,
-            owner=params.webhcat_user,
-            recursive=True
-  )
-
-  hcat_TemplateConfig('hcat-env.sh')
-
-
-def hcat_TemplateConfig(name):
-  import params
-
-  TemplateConfig(format("{hcat_conf_dir}/{name}"),
-                 owner=params.hcat_user,
-                 group=params.user_group
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_client.py
deleted file mode 100644
index 54a8937..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_client.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from hcat import hcat
-
-class HCatClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hcat()
-
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-
-if __name__ == "__main__":
-  HCatClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_service_check.py
deleted file mode 100644
index 5112e99..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hcat_service_check.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-def hcat_service_check():
-    import params
-
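-    # Smoke-test flow: create a table via hcatSmoke.sh, verify that its
-    # warehouse directory exists in HDFS, then drop the table again.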
-    unique = get_unique_id_and_date()
-    output_file = format("/apps/hive/warehouse/hcatsmoke{unique}")
-    test_cmd = format("fs -test -e {output_file}")
-
-    if params.security_enabled:
-      kinit_cmd = format(
-        "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
-    else:
-      kinit_cmd = ""
-
-    File('/tmp/hcatSmoke.sh',
-         content=StaticFile("hcatSmoke.sh"),
-         mode=0755
-    )
-
-    prepare_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} prepare")
-
-    Execute(prepare_cmd,
-            tries=3,
-            user=params.smokeuser,
-            try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
-            logoutput=True)
-
-    ExecuteHadoop(test_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir)
-
-    cleanup_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} cleanup")
-
-    Execute(cleanup_cmd,
-            tries=3,
-            user=params.smokeuser,
-            try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
-            logoutput=True
-    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive.py
deleted file mode 100644
index b37ebb2..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-
-
-def hive(name=None):
-  import params
-
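-  # Server roles (metastore, hiveserver2) get the restricted server conf
-  # dir with 0600 configs and the JDBC driver; everything else gets the
-  # world-readable client conf dir.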
-  if name == 'metastore' or name == 'hiveserver2':
-    hive_config_dir = params.hive_server_conf_dir
-    config_file_mode = 0600
-    jdbc_connector()
-  else:
-    hive_config_dir = params.hive_conf_dir
-    config_file_mode = 0644
-
-  Directory(hive_config_dir,
-            owner=params.hive_user,
-            group=params.user_group,
-            recursive=True
-  )
-
-  XmlConfig("hive-site.xml",
-            conf_dir=hive_config_dir,
-            configurations=params.config['configurations']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=config_file_mode
-  )
-
-  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 "
-               "{jdk_location}{check_db_connection_jar_name} -o {check_db_connection_jar_name}'")
-
-  Execute(cmd,
-          not_if=format("[ -f {check_db_connection_jar_name}]"))
-
-  if name == 'metastore':
-    File(params.start_metastore_path,
-         mode=0755,
-         content=StaticFile('startMetastore.sh')
-    )
-
-  elif name == 'hiveserver2':
-    File(params.start_hiveserver2_path,
-         mode=0755,
-         content=StaticFile('startHiveserver2.sh')
-    )
-
-  if name != "client":
-    crt_directory(params.hive_pid_dir)
-    crt_directory(params.hive_log_dir)
-    crt_directory(params.hive_var_lib)
-
-  File(format("{hive_config_dir}/hive-env.sh"),
-       owner=params.hive_user,
-       group=params.user_group,
-       content=Template('hive-env.sh.j2', conf_dir=hive_config_dir)
-  )
-
-  crt_file(format("{hive_conf_dir}/hive-default.xml.template"))
-  crt_file(format("{hive_conf_dir}/hive-env.sh.template"))
-  crt_file(format("{hive_conf_dir}/hive-exec-log4j.properties.template"))
-  crt_file(format("{hive_conf_dir}/hive-log4j.properties.template"))
-
-
-def crt_directory(name):
-  import params
-
-  Directory(name,
-            recursive=True,
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0755)
-
-
-def crt_file(name):
-  import params
-
-  File(name,
-       owner=params.hive_user,
-       group=params.user_group
-  )
-
-
-def jdbc_connector():
-  import params
-
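-  # Stage the metastore JDBC driver: the MySQL connector is copied from
-  # the local package install, the Oracle driver is downloaded from the
-  # configured URL.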
-  if params.hive_jdbc_driver == "com.mysql.jdbc.Driver":
-    cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
-
-    Execute(cmd,
-            not_if=format("test -f {target}"),
-            creates=params.target,
-            path=["/bin", "usr/bin/"])
-
-  elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
-    cmd = format(
-      "mkdir -p {artifact_dir} ; curl -kf --retry 10 {driver_curl_source} -o {driver_curl_target} &&  "
-      "cp {driver_curl_target} {target}")
-
-    Execute(cmd,
-            not_if=format("test -f {target}"),
-            path=["/bin", "usr/bin/"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_client.py
deleted file mode 100644
index 0a5fb2b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_client.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import sys
-from resource_management import *
-
-from hive import hive
-
-class HiveClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='client')
-
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_metastore.py
deleted file mode 100644
index c741174..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_metastore.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from hive import hive
-from hive_service import hive_service
-
-class HiveMetastore(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='metastore')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    hive_service( 'metastore',
-                   action = 'start'
-    )
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    hive_service( 'metastore',
-                   action = 'stop'
-    )
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
-    # Check the Hive Metastore process status via its pid file
-    check_process_status(pid_file)
-
-if __name__ == "__main__":
-  HiveMetastore().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/92583535/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_server.py
deleted file mode 100644
index 3ad81a1..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.4/services/HIVE/package/scripts/hive_server.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from hive import hive
-from hive_service import hive_service
-
-class HiveServer(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-
-    hive(name='hiveserver2')
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    hive_service( 'hiveserver2',
-                  action = 'start'
-    )
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    hive_service( 'hiveserver2',
-                  action = 'stop'
-    )
-
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{hive_pid_dir}/{hive_pid}")
-    # Check the HiveServer2 process status via its pid file
-    check_process_status(pid_file)
-
-if __name__ == "__main__":
-  HiveServer().execute()