Posted to commits@ambari.apache.org by wu...@apache.org on 2022/10/28 06:20:39 UTC

[ambari] branch trunk updated: AMBARI-25766: Upgrade Hive for BIGTOP to be compatible with bigtop-select (#3435)

This is an automated email from the ASF dual-hosted git repository.

wuzhiguo pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new ae9cbef792 AMBARI-25766: Upgrade Hive for BIGTOP to be compatible with bigtop-select (#3435)
ae9cbef792 is described below

commit ae9cbef7920ad6f6d43d9498c655d61ba33f7596
Author: Yu Hou <52...@qq.com>
AuthorDate: Fri Oct 28 14:20:29 2022 +0800

    AMBARI-25766: Upgrade Hive for BIGTOP to be compatible with bigtop-select (#3435)
---
 .../BIGTOP/3.2.0/properties/stack_packages.json    | 32 +++++++-
 .../3.2.0/services/HIVE/configuration/hive-env.xml | 10 +--
 .../services/HIVE/configuration/webhcat-site.xml   | 19 +----
 .../stacks/BIGTOP/3.2.0/services/HIVE/metainfo.xml |  7 --
 .../3.2.0/services/HIVE/package/scripts/hcat.py    |  4 +-
 .../HIVE/package/scripts/hcat_service_check.py     |  2 +-
 .../3.2.0/services/HIVE/package/scripts/hive.py    | 85 +++++----------------
 .../HIVE/package/scripts/hive_metastore.py         |  6 +-
 .../HIVE/package/scripts/hive_server_upgrade.py    | 14 ++--
 .../services/HIVE/package/scripts/hive_service.py  | 12 +--
 .../3.2.0/services/HIVE/package/scripts/params.py  | 89 +++++++++++-----------
 .../HIVE/package/scripts/setup_ranger_hive.py      |  2 +-
 .../services/HIVE/package/scripts/status_params.py | 48 ++----------
 .../3.2.0/services/HIVE/package/scripts/webhcat.py | 16 ++--
 .../HIVE/package/scripts/webhcat_service.py        |  4 +-
 .../HIVE/package/templates/startHiveserver2.sh.j2  |  2 +-
 16 files changed, 135 insertions(+), 217 deletions(-)
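
The thrust of the change: replace the hard-coded /usr/lib/... locations in the BIGTOP 3.2.0 Hive scripts with directories derived from the stack root, so that bigtop-select can repoint them across versions. A minimal sketch of the layout convention the reworked params.py relies on (the /usr/bigtop root and link targets below are illustrative assumptions, not taken from this diff):

    import os

    STACK_ROOT = "/usr/bigtop"  # assumed; the scripts use Script.get_stack_root()

    def current_dir(component):
        # A *-select tool keeps <stack_root>/current/<component> as a
        # symlink into one concrete stack version; scripts reference only
        # the "current" link, and an upgrade just re-points it.
        return os.path.join(STACK_ROOT, "current", component)

    hive_home = current_dir("hive-client")
    hive_bin_dir = os.path.join(hive_home, "bin")   # .../current/hive-client/bin
    hive_lib_dir = os.path.join(hive_home, "lib")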

diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
index 01a94b0629..08742e8c17 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
@@ -125,7 +125,8 @@
         "HIVE_METASTORE": {
           "STACK-SELECT-PACKAGE": "hive-metastore",
           "INSTALL": [
-            "hive-metastore"
+            "hive-metastore",
+            "hive-client"
           ],
           "PATCH": [
             "hive-metastore"
@@ -137,7 +138,8 @@
         "HIVE_SERVER": {
           "STACK-SELECT-PACKAGE": "hive-server2",
           "INSTALL": [
-            "hive-server2"
+            "hive-server2",
+            "hive-client"
           ],
           "PATCH": [
             "hive-server2"
@@ -157,6 +159,32 @@
           "STANDARD": [
             "hadoop-client"
           ]
+        },
+        "WEBHCAT_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat",
+            "hive-client"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        },
+        "HCAT": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat",
+            "hive-client"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
         }
       },
       "KAFKA": {
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/hive-env.xml
index 15b941a18e..d04b194d87 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/hive-env.xml
@@ -399,18 +399,18 @@ export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS{{heap_dump_opts}}"
 # Set HADOOP_HOME to point to a specific hadoop install directory
 HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
 
-export HIVE_HOME=${HIVE_HOME:-{{hive_home_dir}}}
+export HIVE_HOME=${HIVE_HOME:-{{hive_home}}}
 
 # Hive Configuration Directory can be controlled by:
-export HIVE_CONF_DIR=${HIVE_CONF_DIR:-{{hive_config_dir}}}
+export HIVE_CONF_DIR=${HIVE_CONF_DIR:-{{hive_conf_dir}}}
 
 # Folder containing extra libraries required for hive compilation/execution can be controlled by:
 if [ "${HIVE_AUX_JARS_PATH}" != "" ]; then
   export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}
-elif [ -d "/usr/lib/hive-hcatalog/" ]; then
-  export HIVE_AUX_JARS_PATH=/usr/lib/hive-hcatalog/share/hcatalog/hive-hcatalog-core-*.jar
+elif [ -d "{{hive_hcatalog_home}}" ]; then
+  export HIVE_AUX_JARS_PATH={{hive_hcatalog_home}}/share/hcatalog/hive-hcatalog-core-*.jar
 else
-  export HIVE_AUX_JARS_PATH=/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar
+  export HIVE_AUX_JARS_PATH={{hive_hcatalog_home}}/share/hcatalog/hcatalog-core.jar
 fi
 export METASTORE_PORT={{hive_metastore_port}}
 
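
The hive-env.sh content above is a Jinja2-style template: hive.py writes it out with InlineTemplate, filling the {{...}} placeholders from params.py, which is why the patch swaps the stale names (hive_home_dir, hive_config_dir) for the ones params.py now defines (hive_home, hive_conf_dir, hive_hcatalog_home). A sketch of the substitution with a stand-in value (Ambari's InlineTemplate wraps Jinja2; the path is illustrative):

    from jinja2 import Template

    fragment = "export HIVE_HOME=${HIVE_HOME:-{{hive_home}}}"
    print(Template(fragment).render(hive_home="/usr/bigtop/current/hive-client"))
    # -> export HIVE_HOME=${HIVE_HOME:-/usr/bigtop/current/hive-client}
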
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/webhcat-site.xml
index 89c5e7d5e4..4458ecf0d6 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/webhcat-site.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/configuration/webhcat-site.xml
@@ -35,13 +35,13 @@ limitations under the License.
   </property>
   <property>
     <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <value>{{hive_hcatalog_home}}/share/webhcat/svr/webhcat.jar</value>
     <description>The path to the Templeton jar file.</description>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
     <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
+    <value>{{zk_home}}/zookeeper.jar</value>
     <description>Jars to add the the classpath.</description>
     <on-ambari-upgrade add="true"/>
   </property>
@@ -57,21 +57,6 @@ limitations under the License.
     <description>The path to the Python executable.</description>
     <on-ambari-upgrade add="true"/>
   </property>
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
   <property>
     <name>templeton.hcat</name>
     <value>/usr/bin/hcat</value>
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/metainfo.xml
index acb9787d79..94d8166b51 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/metainfo.xml
@@ -179,13 +179,6 @@
                                 <enabled>true</enabled>
                             </auto-deploy>
                         </dependency>
-                        <dependency>
-                            <name>PIG/PIG</name>
-                            <scope>host</scope>
-                            <auto-deploy>
-                                <enabled>true</enabled>
-                            </auto-deploy>
-                        </dependency>
                     </dependencies>
                     <commandScript>
                         <script>scripts/webhcat_server.py</script>
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat.py
index acca37393b..0963102215 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat.py
@@ -55,7 +55,7 @@ def hcat():
   )
 
   XmlConfig("hive-site.xml",
-            conf_dir=params.hive_client_conf_dir,
+            conf_dir=params.hive_conf_dir,
             configurations=params.config['configurations']['hive-site'],
             configuration_attributes=params.config['configurationAttributes']['hive-site'],
             owner=params.hive_user,
@@ -70,5 +70,5 @@ def hcat():
 
   # Generate atlas-application.properties.xml file
   if params.enable_atlas_hook:
-    atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename)
+    atlas_hook_filepath = os.path.join(params.hive_conf_dir, params.atlas_hook_filename)
     setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat_service_check.py
index cee71e5337..dccb2117b6 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat_service_check.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hcat_service_check.py
@@ -48,7 +48,7 @@ def hcat_service_check():
 
     exec_path = params.execute_path
     if params.version and params.stack_root:
-      upgrade_hive_bin = format("{stack_root}/{version}/hive/bin")
+      upgrade_hive_bin = format("{hive_home}/bin")
       exec_path =  os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + upgrade_hive_bin
 
     Execute(prepare_cmd,
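
These scripts lean on resource_management's format(), which, unlike str.format(), resolves {names} from the caller's scope, so format("{hive_home}/bin") picks up hive_home from params.py without explicit arguments. A rough stdlib approximation (the real helper also understands suffixes such as !p for password masking, used later in hive.py):

    import inspect

    def scoped_format(pattern):
        # Approximation: interpolate from the caller's globals and locals
        # rather than from explicit keyword arguments.
        caller = inspect.currentframe().f_back
        names = dict(caller.f_globals)
        names.update(caller.f_locals)
        return pattern.format(**names)

    hive_home = "/usr/bigtop/current/hive-client"
    print(scoped_format("{hive_home}/bin"))  # /usr/bigtop/current/hive-client/bin
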
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive.py
index a20f336a4b..eaf9dd02a6 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive.py
@@ -60,13 +60,13 @@ def hive(name=None):
 
   params.hive_site_config = update_credential_provider_path(params.hive_site_config,
                                                      'hive-site',
-                                                     os.path.join(params.hive_config_dir, 'hive-site.jceks'),
+                                                     os.path.join(params.hive_conf_dir, 'hive-site.jceks'),
                                                      params.hive_user,
                                                      params.user_group
                                                      )
 
   XmlConfig("hive-site.xml",
-            conf_dir = params.hive_config_dir,
+            conf_dir = params.hive_conf_dir,
             configurations = params.hive_site_config,
             configuration_attributes = params.config['configurationAttributes']['hive-site'],
             owner = params.hive_user,
@@ -75,10 +75,10 @@ def hive(name=None):
 
   # Generate atlas-application.properties.xml file
   if params.enable_atlas_hook:
-    atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename)
+    atlas_hook_filepath = os.path.join(params.hive_conf_dir, params.atlas_hook_filename)
     setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
 
-  File(format("{hive_config_dir}/hive-env.sh"),
+  File(format("{hive_conf_dir}/hive-env.sh"),
        owner=params.hive_user,
        group=params.user_group,
        content=InlineTemplate(params.hive_env_sh_template),
@@ -99,7 +99,7 @@ def hive(name=None):
        content=Template("hive.conf.j2")
        )
   if params.security_enabled:
-    File(os.path.join(params.hive_config_dir, 'zkmigrator_jaas.conf'),
+    File(os.path.join(params.hive_conf_dir, 'zkmigrator_jaas.conf'),
          owner=params.hive_user,
          group=params.user_group,
          content=Template("zkmigrator_jaas.conf.j2")
@@ -128,67 +128,20 @@ def setup_hiveserver2():
        content=Template(format('{start_hiveserver2_script}'))
   )
 
-  File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
+  File(os.path.join(params.hive_conf_dir, "hadoop-metrics2-hiveserver2.properties"),
        owner=params.hive_user,
        group=params.user_group,
        content=Template("hadoop-metrics2-hiveserver2.properties.j2"),
        mode=0600
   )
   XmlConfig("hiveserver2-site.xml",
-            conf_dir=params.hive_server_conf_dir,
+            conf_dir=params.hive_conf_dir,
             configurations=params.config['configurations']['hiveserver2-site'],
             configuration_attributes=params.config['configurationAttributes']['hiveserver2-site'],
             owner=params.hive_user,
             group=params.user_group,
             mode=0600)
 
-  # ****** Begin Copy Tarballs ******
-  # *********************************
-  #  if copy tarball to HDFS feature  supported copy mapreduce.tar.gz and tez.tar.gz to HDFS
-  if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
-    copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-    copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-  # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
-  # This can use a different source and dest location to account
-  copy_to_hdfs("pig",
-               params.user_group,
-               params.hdfs_user,
-               file_mode=params.tarballs_mode,
-               custom_source_file=params.pig_tar_source,
-               custom_dest_file=params.pig_tar_dest_file,
-               skip=params.sysprep_skip_copy_tarballs_hdfs)
-  copy_to_hdfs("hive",
-               params.user_group,
-               params.hdfs_user,
-               file_mode=params.tarballs_mode,
-               custom_source_file=params.hive_tar_source,
-               custom_dest_file=params.hive_tar_dest_file,
-               skip=params.sysprep_skip_copy_tarballs_hdfs)
-
-  wildcard_tarballs = ["sqoop", "hadoop_streaming"]
-  for tarball_name in wildcard_tarballs:
-    source_file_pattern = eval("params." + tarball_name + "_tar_source")
-    dest_dir = eval("params." + tarball_name + "_tar_dest_dir")
-
-    if source_file_pattern is None or dest_dir is None:
-      continue
-
-    source_files = glob.glob(source_file_pattern) if "*" in source_file_pattern else [source_file_pattern]
-    for source_file in source_files:
-      src_filename = os.path.basename(source_file)
-      dest_file = os.path.join(dest_dir, src_filename)
-
-      copy_to_hdfs(tarball_name,
-                   params.user_group,
-                   params.hdfs_user,
-                   file_mode=params.tarballs_mode,
-                   custom_source_file=source_file,
-                   custom_dest_file=dest_file,
-                   skip=params.sysprep_skip_copy_tarballs_hdfs)
-  # ******* End Copy Tarballs *******
-  # *********************************
-
   # if warehouse directory is in DFS
   if not params.whs_dir_protocol or params.whs_dir_protocol == urlparse(params.default_fs).scheme:
     if not is_empty(params.tez_hook_proto_base_directory):
@@ -375,14 +328,14 @@ def setup_metastore():
     hivemetastore_site_config = get_config("hivemetastore-site")
     if hivemetastore_site_config:
       XmlConfig("hivemetastore-site.xml",
-                conf_dir=params.hive_server_conf_dir,
+                conf_dir=params.hive_conf_dir,
                 configurations=params.config['configurations']['hivemetastore-site'],
                 configuration_attributes=params.config['configurationAttributes']['hivemetastore-site'],
                 owner=params.hive_user,
                 group=params.user_group,
                 mode=0600)
 
-  File(os.path.join(params.hive_server_conf_dir, "hadoop-metrics2-hivemetastore.properties"),
+  File(os.path.join(params.hive_conf_dir, "hadoop-metrics2-hivemetastore.properties"),
        owner=params.hive_user,
        group=params.user_group,
        content=Template("hadoop-metrics2-hivemetastore.properties.j2"),
@@ -438,16 +391,16 @@ def create_hive_metastore_schema():
     Logger.info("Sys DB is already created")
     return
 
-  create_hive_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                  "{hive_schematool_bin}/schematool -initSchema "
+  create_hive_schema_cmd = format("export HIVE_CONF_DIR={hive_conf_dir} ; "
+                                  "{hive_bin_dir}/schematool -initSchema "
                                   "-dbType hive "
                                   "-metaDbType {hive_metastore_db_type} "
                                   "-userName {hive_metastore_user_name} "
                                   "-passWord {hive_metastore_user_passwd!p} "
                                   "-verbose")
 
-  check_hive_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                          "{hive_schematool_bin}/schematool -info "
+  check_hive_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_conf_dir} ; "
+                                          "{hive_bin_dir}/schematool -info "
                                           "-dbType hive "
                                           "-metaDbType {hive_metastore_db_type} "
                                           "-userName {hive_metastore_user_name} "
@@ -487,14 +440,14 @@ def create_metastore_schema():
     Logger.info("Skipping creation of Hive Metastore schema as host is sys prepped")
     return
 
-  create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                             "{hive_schematool_bin}/schematool -initSchema "
+  create_schema_cmd = format("export HIVE_CONF_DIR={hive_conf_dir} ; "
+                             "{hive_bin_dir}/schematool -initSchema "
                              "-dbType {hive_metastore_db_type} "
                              "-userName {hive_metastore_user_name} "
                              "-passWord {hive_metastore_user_passwd!p} -verbose")
 
-  check_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
-                                    "{hive_schematool_bin}/schematool -info "
+  check_schema_created_cmd = as_user(format("export HIVE_CONF_DIR={hive_conf_dir} ; "
+                                    "{hive_bin_dir}/schematool -info "
                                     "-dbType {hive_metastore_db_type} "
                                     "-userName {hive_metastore_user_name} "
                                     "-passWord {hive_metastore_user_passwd!p} -verbose"), params.hive_user)
@@ -644,14 +597,14 @@ def jdbc_connector(target, hive_previous_jdbc_jar):
 
       Execute(untar_sqla_type2_driver, sudo = True)
 
-      Execute(format("yes | {sudo} cp {jars_path_in_archive} {hive_lib}"))
+      Execute(format("yes | {sudo} cp {jars_path_in_archive} {hive_lib_dir}"))
 
       Directory(params.jdbc_libs_dir,
                 create_parents = True)
 
       Execute(format("yes | {sudo} cp {libs_path_in_archive} {jdbc_libs_dir}"))
 
-      Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib}/*"))
+      Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib_dir}/*"))
 
     else:
       Execute(('cp', '--remove-destination', params.downloaded_custom_connector, target),
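
Besides the renames, the large removed block above drops the HDFS tarball staging (mapreduce, tez, pig, hive, sqoop, hadoop-streaming), which this stack no longer performs here. The schema helpers that remain build ordinary schematool command lines rooted at hive_bin_dir; a sketch of what create_metastore_schema() expands to once format() fills in the names (values illustrative; the real {hive_metastore_user_passwd!p} is masked in logs):

    hive_conf_dir = "/etc/hive/conf"
    hive_bin_dir = "/usr/bigtop/current/hive-client/bin"
    hive_metastore_db_type = "mysql"

    create_schema_cmd = (
        "export HIVE_CONF_DIR=%s ; "
        "%s/schematool -initSchema -dbType %s "
        "-userName hive -passWord ****** -verbose"
        % (hive_conf_dir, hive_bin_dir, hive_metastore_db_type))
    print(create_schema_cmd)
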
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_metastore.py
index 90754f740f..0bf36ec52c 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_metastore.py
@@ -153,7 +153,7 @@ class HiveMetastore(Script):
 
         Execute(format("yes | {sudo} cp {libs_in_hive_lib} {target_native_libs_directory}"))
 
-        Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib}/*"))
+        Execute(format("{sudo} chown -R {hive_user}:{user_group} {hive_lib_dir}/*"))
       else:
         # copy the JDBC driver from the older metastore location to the new location only
         # if it does not already exist
@@ -164,12 +164,12 @@ class HiveMetastore(Script):
       File(target_directory_and_filename, mode = 0644)
 
     # build the schema tool command
-    binary = format("{hive_schematool_ver_bin}/schematool")
+    binary = format("{hive_bin_dir}/schematool")
 
     # the conf.server directory changed locations between stack versions
     # since the configurations have not been written out yet during an upgrade
     # we need to choose the original legacy location
-    schematool_hive_server_conf_dir = params.hive_server_conf_dir
+    schematool_hive_server_conf_dir = params.hive_conf_dir
 
     upgrade_from_version = upgrade_summary.get_source_version("HIVE",
       default_version = params.version_for_stack_feature_checks)
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_server_upgrade.py
index 6523670ed9..920e1ef8c1 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_server_upgrade.py
@@ -62,9 +62,9 @@ def deregister():
     raise Fail('Unable to determine the current HiveServer2 version to deregister.')
 
   # fallback when upgrading because <stack-root>/current/hive-server2/conf/conf may not exist
-  hive_server_conf_dir = params.hive_server_conf_dir
-  if not os.path.exists(hive_server_conf_dir):
-    hive_server_conf_dir = "/etc/hive/conf"
+  hive_conf_dir = params.hive_conf_dir
+  if not os.path.exists(hive_conf_dir):
+    hive_conf_dir = "/etc/hive/conf"
 
   # deregister
   hive_execute_path = params.execute_path
@@ -74,7 +74,7 @@ def deregister():
   if params.downgrade_from_version is not None:
     hive_execute_path = _get_hive_execute_path(params.downgrade_from_version)
 
-  command = format('hive --config {hive_server_conf_dir} --service hiveserver2 --deregister ' + current_hiveserver_version)
+  command = format('hive --config {hive_conf_dir} --service hiveserver2 --deregister ' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
 
 
@@ -92,8 +92,8 @@ def _get_hive_execute_path(stack_version_formatted):
   if formatted_stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
     # hive_bin
     new_hive_bin = format('{stack_root}/{stack_version_formatted}/hive/bin')
-    if (os.pathsep + params.hive_bin) in hive_execute_path:
-      hive_execute_path = hive_execute_path.replace(os.pathsep + params.hive_bin, os.pathsep + new_hive_bin)
+    if (os.pathsep + params.hive_bin_dir) in hive_execute_path:
+      hive_execute_path = hive_execute_path.replace(os.pathsep + params.hive_bin_dir, os.pathsep + new_hive_bin)
     # hadoop_bin_dir
     new_hadoop_bin = stack_select.get_hadoop_dir_for_stack_version("bin", stack_version_formatted)
     old_hadoop_bin = params.hadoop_bin_dir
@@ -118,7 +118,7 @@ def _get_current_hiveserver_version():
       source_version = params.downgrade_from_version
 
     hive_execute_path = _get_hive_execute_path(source_version)
-    version_hive_bin = params.hive_bin
+    version_hive_bin = params.hive_bin_dir
     formatted_source_version = format_stack_version(source_version)
     if formatted_source_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_source_version):
       version_hive_bin = format('{stack_root}/{source_version}/hive/bin')
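
The hive_bin to hive_bin_dir rename threads through the upgrade path logic, which swaps the current bin directory for a version-specific one on the PATH during a rolling upgrade. A condensed sketch of that substitution (paths illustrative):

    import os

    def swap_path_entry(execute_path, old_bin, new_bin):
        # Mirrors _get_hive_execute_path(): replace the old hive bin dir
        # in the PATH-style string with the versioned one, if present.
        marker = os.pathsep + old_bin
        if marker in execute_path:
            execute_path = execute_path.replace(marker, os.pathsep + new_bin)
        return execute_path

    path = "/usr/bin" + os.pathsep + "/usr/lib/hive/bin"
    print(swap_path_entry(path, "/usr/lib/hive/bin",
                          "/usr/bigtop/3.2.0/usr/lib/hive/bin"))
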
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_service.py
index 3c0833bf92..344acb65f3 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/hive_service.py
@@ -46,10 +46,10 @@ def hive_service(name, action='start', upgrade_type=None):
 
   if name == 'metastore':
     pid_file = status_params.hive_metastore_pid
-    cmd = format("{start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.err {pid_file} {hive_server_conf_dir}")
+    cmd = format("{start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.err {pid_file} {hive_conf_dir}")
   elif name == 'hiveserver2':
     pid_file = status_params.hive_pid
-    cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.err {pid_file} {hive_server_conf_dir} {tez_conf_dir}")
+    cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.err {pid_file} {hive_conf_dir} {tez_conf_dir}")
 
 
     if params.security_enabled:
@@ -61,7 +61,7 @@ def hive_service(name, action='start', upgrade_type=None):
 
   if action == 'start':
     if name == 'hiveserver2':
-      check_fs_root(params.hive_server_conf_dir, params.execute_path)
+      check_fs_root(params.hive_conf_dir, params.execute_path)
 
     daemon_cmd = cmd
     hadoop_home = params.hadoop_home
@@ -75,7 +75,7 @@ def hive_service(name, action='start', upgrade_type=None):
 
       if params.version and params.stack_root:
         hadoop_home = format("{stack_root}/{version}/hadoop")
-        hive_bin = os.path.join(params.hive_bin, hive_bin)
+        hive_bin = os.path.join(params.hive_bin_dir, hive_bin)
       
     Execute(daemon_cmd, 
       user = params.hive_user,
@@ -91,7 +91,7 @@ def hive_service(name, action='start', upgrade_type=None):
 
       if params.hive_jdbc_target is not None:
         validation_called = True
-        validate_connection(params.hive_jdbc_target, params.hive_lib)
+        validate_connection(params.hive_jdbc_target, params.hive_lib_dir)
 
       if not validation_called:
         emessage = "ERROR! DB connection check should be executed at least one time!"
@@ -190,7 +190,7 @@ def wait_for_znode():
   except ComponentIsNotRunning:
     raise Exception(format("HiveServer2 is no longer running, check the logs at {hive_log_dir}"))
   
-  cmd = format("{zk_bin}/zkCli.sh -server {zk_quorum} ls /{hive_server2_zookeeper_namespace} | grep 'serverUri='")
+  cmd = format("{zk_bin_dir}/zkCli.sh -server {zk_quorum} ls /{hive_server2_zookeeper_namespace} | grep 'serverUri='")
   code, out = shell.call(cmd)
   if code == 1:
     raise Fail(format("ZooKeeper node /{hive_server2_zookeeper_namespace} is not ready yet"))
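
wait_for_znode() polls ZooKeeper until HiveServer2 registers its serverUri= entry under the configured namespace; only then is the server treated as up. An approximate standalone version of the probe (the real code goes through shell.call with a retry decorator):

    import subprocess

    def hiveserver2_registered(zk_bin_dir, zk_quorum, namespace):
        # List the children of /<namespace> via zkCli.sh and look for a
        # serverUri= entry, as the cmd in wait_for_znode() does.
        cmd = ("%s/zkCli.sh -server %s ls /%s | grep 'serverUri='"
               % (zk_bin_dir, zk_quorum, namespace))
        return subprocess.call(cmd, shell=True) == 0

    # e.g. hiveserver2_registered("/usr/lib/zookeeper/bin",
    #                             "zk1:2181,zk2:2181", "hiveserver2")
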
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/params.py
index 6ea2b1d6ea..cbd4fe4c2c 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/params.py
@@ -51,6 +51,8 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
 
 
 host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)
@@ -111,15 +113,26 @@ stack_supports_ranger_hive_jdbc_url_change = check_stack_feature(StackFeature.RA
 # component ROLE directory (like hive-metastore or hive-server2-hive)
 component_directory = status_params.component_directory
 
-hadoop_home = '/usr/lib/hadoop'
-hive_bin = '/usr/lib/hive/bin'
-hive_schematool_ver_bin = hive_bin
-hive_schematool_bin = hive_bin
-hive_lib = '/usr/lib/hive/lib'
-hive_version_lib = hive_lib
+# default configuration directories
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+tez_conf_dir = "/etc/tez/conf"
+zk_home = format('/usr/lib/zookeeper')
+
+hive_conf_dir = '/etc/hive/conf'
+hive_home = '/usr/lib/hive'
 hive_var_lib = '/var/lib/hive'
 hive_user_home_dir = "/home/hive"
-zk_bin = format('/usr/lib/zookeeper/bin')
+
+# hadoop parameters for stacks that support rolling_upgrade
+if stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted_major):
+  hive_home = format("{stack_root}/current/hive-client")
+  zk_home = format("{stack_root}/{stack_version_formatted_major}/usr/lib/zookeeper")
+
+zk_bin_dir = format('{zk_home}/bin')
+hive_bin_dir = format('{hive_home}/bin')
+hive_lib_dir = format('{hive_home}/lib')
 
 # starting on stacks where HSI is supported, we need to begin using the 'hive2' schematool
 hive_server2_hive_dir = None
@@ -145,30 +158,11 @@ limits_conf_dir = "/etc/security/limits.d"
 hive_user_nofile_limit = default("/configurations/hive-env/hive_user_nofile_limit", "32000")
 hive_user_nproc_limit = default("/configurations/hive-env/hive_user_nproc_limit", "16000")
 
-# use the directories from status_params as they are already calculated for
-# the correct stack version
-hadoop_conf_dir = status_params.hadoop_conf_dir
-hadoop_bin_dir = status_params.hadoop_bin_dir
-hive_conf_dir = status_params.hive_conf_dir
-hive_home_dir = status_params.hive_home_dir
-hive_config_dir = status_params.hive_config_dir
-hive_client_conf_dir = status_params.hive_client_conf_dir
-hive_server_conf_dir = status_params.hive_server_conf_dir
-tez_conf_dir = status_params.tez_conf_dir
-
-
 # --- Tarballs ---
 # DON'T CHANGE THESE VARIABLE NAMES
 # Values don't change from those in copy_tarball.py
 hive_tar_source = "{0}/{1}/hive/hive.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN)
-pig_tar_source = "{0}/{1}/pig/pig.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN)
 hive_tar_dest_file = "/{0}/apps/{1}/hive/hive.tar.gz".format(STACK_NAME_PATTERN,STACK_VERSION_PATTERN)
-pig_tar_dest_file = "/{0}/apps/{1}/pig/pig.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)
-
-hadoop_streaming_tar_source = "{0}/{1}/hadoop-mapreduce/hadoop-streaming.jar".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN)
-sqoop_tar_source = "{0}/{1}/sqoop/sqoop.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN)
-hadoop_streaming_tar_dest_dir = "/{0}/apps/{1}/mapreduce/".format(STACK_NAME_PATTERN,STACK_VERSION_PATTERN)
-sqoop_tar_dest_dir = "/{0}/apps/{1}/sqoop/".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)
 
 tarballs_mode = 0444
 
@@ -180,7 +174,7 @@ if check_stack_feature(StackFeature.HIVE_PURGE_TABLE, version_for_stack_feature_
 if check_stack_feature(StackFeature.HIVE_METASTORE_SITE_SUPPORT, version_for_stack_feature_checks):
   hive_metastore_site_supported = True
 
-execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin_dir + os.pathsep + hadoop_bin_dir
 
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
@@ -248,19 +242,19 @@ elif hive_jdbc_driver == "sap.jdbc4.sqlanywhere.IDriver":
 else: raise Fail(format("JDBC driver '{hive_jdbc_driver}' not supported."))
 
 default_mysql_jar_name = "mysql-connector-java.jar"
-default_mysql_target = format("{hive_lib}/{default_mysql_jar_name}")
-hive_previous_jdbc_jar = format("{hive_lib}/{hive_previous_jdbc_jar_name}")
+default_mysql_target = format("{hive_lib_dir}/{default_mysql_jar_name}")
+hive_previous_jdbc_jar = format("{hive_lib_dir}/{hive_previous_jdbc_jar_name}")
 if not hive_use_existing_db:
   jdbc_jar_name = default_mysql_jar_name
 
 
 downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
 
-hive_jdbc_target = format("{hive_lib}/{jdbc_jar_name}")
+hive_jdbc_target = format("{hive_lib_dir}/{jdbc_jar_name}")
 
 # during upgrade / downgrade, use the specific version to copy the JDBC JAR to
 if upgrade_direction:
-  hive_jdbc_target = format("{hive_version_lib}/{jdbc_jar_name}")
+  hive_jdbc_target = format("{hive_lib_dir}/{jdbc_jar_name}")
 
 
 driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
@@ -277,10 +271,10 @@ hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver","com.my
                           "org.postgresql.Driver","oracle.jdbc.driver.OracleDriver","sap.jdbc4.sqlanywhere.IDriver"]
 
 prepackaged_jdbc_name = "ojdbc6.jar"
-prepackaged_ojdbc_symlink = format("{hive_lib}/{prepackaged_jdbc_name}")
+prepackaged_ojdbc_symlink = format("{hive_lib_dir}/{prepackaged_jdbc_name}")
 
 #constants for type2 jdbc
-jdbc_libs_dir = format("{hive_lib}/native/lib64")
+jdbc_libs_dir = format("{hive_lib_dir}/native/lib64")
 lib_dir_available = os.path.exists(jdbc_libs_dir)
 
 if sqla_db_used:
@@ -342,14 +336,14 @@ hive_pid = status_params.hive_pid
 # hive_interactive_pid = status_params.hive_interactive_pid
 
 #Default conf dir for client
-hive_conf_dirs_list = [hive_client_conf_dir]
+hive_conf_dirs_list = [hive_conf_dir]
 
 # These are the folders to which the configs will be written to.
-ranger_hive_ssl_config_file = os.path.join(hive_server_conf_dir, "ranger-policymgr-ssl.xml")
+ranger_hive_ssl_config_file = os.path.join(hive_conf_dir, "ranger-policymgr-ssl.xml")
 if status_params.role == "HIVE_METASTORE" and hive_metastore_hosts is not None and hostname in hive_metastore_hosts:
-  hive_conf_dirs_list.append(hive_server_conf_dir)
+  hive_conf_dirs_list.append(hive_conf_dir)
 elif status_params.role == "HIVE_SERVER" and hive_server_hosts is not None and hostname in hive_server_hosts:
-  hive_conf_dirs_list.append(hive_server_conf_dir)
+  hive_conf_dirs_list.append(hive_conf_dir)
 
 #Starting hiveserver2
 start_hiveserver2_script = 'startHiveserver2.sh.j2'
@@ -365,7 +359,7 @@ artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 # Need this for yarn.nodemanager.recovery.dir in yarn-site
 yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
 
-jars_in_hive_lib = format("{hive_lib}/*.jar")
+jars_in_hive_lib = format("{hive_lib_dir}/*.jar")
 
 start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
 start_metastore_path = format("{tmp_dir}/start_metastore_script")
@@ -513,15 +507,18 @@ atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'a
 #endregion
 
 ########## HCAT
-webhcat_conf_dir = status_params.webhcat_conf_dir
-hive_apps_whs_dir = hive_metastore_warehouse_dir
+webhcat_conf_dir = "/etc/hive-webhcat/conf"
 hcat_conf_dir = '/etc/hive-hcatalog/conf'
-config_dir = '/etc/hive-webhcat/conf'
+hive_apps_whs_dir = hive_metastore_warehouse_dir
 
 # there are no client versions of these, use server versions directly
-hcat_lib = format('/usr/lib/hive-hcatalog/share/hcatalog')
-webhcat_bin_dir = format('/usr/lib/hive-hcatalog/sbin')
+hive_hcatalog_home = '/usr/lib/hive-hcatalog'
+# hadoop parameters for stacks that support rolling_upgrade
+if stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted_major):
+  hive_hcatalog_home = format('{stack_root}/current/hive-webhcat')
 
+hcat_lib = format('{hive_hcatalog_home}/share/hcatalog')
+webhcat_bin_dir = format('{hive_hcatalog_home}/sbin')
 webhcat_apps_dir = "/apps/webhcat"
 
 templeton_port = config['configurations']['webhcat-site']['templeton.port']
@@ -628,7 +625,7 @@ if security_enabled:
   yarn_kinit_cmd = format("{kinit_path_local} -kt {yarn_keytab} {yarn_principal_name};")
 
 hive_cluster_token_zkstore = default("/configurations/hive-site/hive.cluster.delegation.token.store.zookeeper.znode", None)
-jaas_file = os.path.join(hive_config_dir, 'zkmigrator_jaas.conf')
+jaas_file = os.path.join(hive_conf_dir, 'zkmigrator_jaas.conf')
 hive_zk_namespace = default("/configurations/hive-site/hive.zookeeper.namespace", None)
 
 zk_principal_name = default("/configurations/zookeeper-env/zookeeper_principal_name", "zookeeper/_HOST@EXAMPLE.COM")
@@ -702,8 +699,8 @@ if enable_ranger_hive:
 
     ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
     ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_jar_name}")
-    ranger_driver_curl_target = format("{hive_lib}/{ranger_jdbc_jar_name}")
-    ranger_previous_jdbc_jar = format("{hive_lib}/{ranger_previous_jdbc_jar_name}")
+    ranger_driver_curl_target = format("{hive_lib_dir}/{ranger_jdbc_jar_name}")
+    ranger_previous_jdbc_jar = format("{hive_lib_dir}/{ranger_previous_jdbc_jar_name}")
     sql_connector_jar = ''
 
   ranger_hive_url = format("{hive_url}/default;principal={hive_principal}") if security_enabled else hive_url
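
The net effect in params.py: packaged /usr/lib defaults, overridden with <stack_root>/current/... links only when the stack advertises rolling-upgrade support, and bin/lib derived from the single home. A condensed sketch of the pattern:

    def resolve_hive_dirs(stack_root, supports_rolling_upgrade):
        # Defaults match the packaged layout; the branch mirrors the
        # check_stack_feature(StackFeature.ROLLING_UPGRADE, ...) block.
        hive_home = "/usr/lib/hive"
        if supports_rolling_upgrade:
            hive_home = "%s/current/hive-client" % stack_root
        return {
            "hive_home": hive_home,
            "hive_bin_dir": hive_home + "/bin",
            "hive_lib_dir": hive_home + "/lib",
        }

    print(resolve_hive_dirs("/usr/bigtop", True)["hive_bin_dir"])
    # -> /usr/bigtop/current/hive-client/bin
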
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/setup_ranger_hive.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/setup_ranger_hive.py
index 4736490c4a..22f796a19d 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/setup_ranger_hive.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/setup_ranger_hive.py
@@ -70,7 +70,7 @@ def setup_ranger_hive(upgrade_type = None):
                         params.repo_name, params.hive_ranger_plugin_repo,
                         params.ranger_env, params.ranger_plugin_properties,
                         params.policy_user, params.policymgr_mgr_url,
-                        params.enable_ranger_hive, conf_dict=params.hive_server_conf_dir,
+                        params.enable_ranger_hive, conf_dict=params.hive_conf_dir,
                         component_user=params.hive_user, component_group=params.user_group, cache_service_list=['hiveServer2'],
                         plugin_audit_properties=params.config['configurations']['ranger-hive-audit'], plugin_audit_attributes=params.config['configurationAttributes']['ranger-hive-audit'],
                         plugin_security_properties=params.config['configurations']['ranger-hive-security'], plugin_security_attributes=params.config['configurationAttributes']['ranger-hive-security'],
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/status_params.py
index a413589a2d..48b88ae1e2 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/status_params.py
@@ -36,36 +36,29 @@ from resource_management.libraries.script.script import Script
 SERVER_ROLE_DIRECTORY_MAP = {
   'HIVE_METASTORE' : 'hive-metastore',
   'HIVE_SERVER' : 'hive-server2',
-  'HIVE_CLIENT' : 'hive-client',
-  'HIVE_SERVER_INTERACTIVE' : 'hive-server2'
+  'HIVE_CLIENT' : 'hive-client'
 }
 
-
 # Either HIVE_METASTORE, HIVE_SERVER, HIVE_CLIENT
 role = default("/role", None)
 component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "HIVE_CLIENT")
-# component_directory_interactive = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "HIVE_SERVER_INTERACTIVE")
 
 config = Script.get_config()
 
-
 stack_root = Script.get_stack_root()
 stack_version_unformatted = config['clusterLevelParams']['stack_version']
 stack_version_formatted_major = format_stack_version(stack_version_unformatted)
 
 hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
 hive_pid = format("{hive_pid_dir}/hive-server.pid")
-# hive_interactive_pid = format("{hive_pid_dir}/hive-interactive.pid")
 hive_metastore_pid = format("{hive_pid_dir}/hive.pid")
 
 process_name = 'mysqld'
 
-
 SERVICE_FILE_TEMPLATES = ['/etc/init.d/{0}', '/usr/lib/systemd/system/{0}.service']
 POSSIBLE_DAEMON_NAMES = ['mysql', 'mysqld', 'mariadb']
 
 
-
 # Security related/required params
 hostname = config['agentLevelParams']['hostname']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
@@ -74,39 +67,8 @@ tmp_dir = Script.get_tmp_dir()
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hive_user = config['configurations']['hive-env']['hive_user']
 
-# default configuration directories
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
-
-hive_server_conf_dir = "/etc/hive/conf"
-# hive_server_interactive_conf_dir = "/etc/hive_llap/conf"
-tez_conf_dir = "/etc/tez/conf"
-# tez_interactive_conf_dir = "/etc/tez_llap/conf"
-
-# hive_home_dir = format("{stack_root}/current/{component_directory}")
-# hive_conf_dir = format("{stack_root}/current/{component_directory}/conf")
-# hive_client_conf_dir = format("{stack_root}/current/{component_directory}/conf")
-hive_home_dir = '/usr/lib/hive'
-hive_conf_dir = hive_server_conf_dir
-hive_client_conf_dir = hive_server_conf_dir
-
-if check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version_formatted_major):
-  hive_server_conf_dir = format("{stack_root}/current/{component_directory}/conf/")
-  hive_conf_dir = hive_server_conf_dir
-
-# if stack version supports hive serve interactive
-# if check_stack_feature(StackFeature.HIVE_SERVER_INTERACTIVE, stack_version_formatted_major):
-#   hive_server_interactive_conf_dir = format("{stack_root}/current/{component_directory_interactive}/conf_llap/")
-
-hive_config_dir = hive_client_conf_dir
-
-# if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE", "HIVE_SERVER_INTERACTIVE"]:
-if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
-  hive_config_dir = hive_server_conf_dir
-  
-stack_name = default("/clusterLevelParams/stack_name", None)
-
-#
-hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
+# hcat_pid_dir
+hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
 webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
-webhcat_conf_dir = format("/etc/hive-webhcat/conf")
\ No newline at end of file
+
+stack_name = default("/clusterLevelParams/stack_name", None)
\ No newline at end of file
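
status_params.py now resolves the *-select component directory from the command role through SERVER_ROLE_DIRECTORY_MAP, defaulting to the client. A sketch of that lookup (the real Script.get_component_from_role reads /role from the command JSON):

    SERVER_ROLE_DIRECTORY_MAP = {
        'HIVE_METASTORE': 'hive-metastore',
        'HIVE_SERVER': 'hive-server2',
        'HIVE_CLIENT': 'hive-client',
    }

    def get_component_from_role(role_map, default_role, role=None):
        # Unknown or missing roles fall back to the client directory.
        return role_map.get(role, role_map[default_role])

    assert get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
                                   'HIVE_CLIENT', 'HIVE_SERVER') == 'hive-server2'
    assert get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
                                   'HIVE_CLIENT') == 'hive-client'
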
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat.py
index 0c75673cf4..c48d2dd9c4 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat.py
@@ -50,7 +50,7 @@ def webhcat():
             group=params.user_group,
             create_parents = True)
 
-  Directory(params.config_dir,
+  Directory(params.webhcat_conf_dir,
             create_parents = True,
             owner=params.webhcat_user,
             group=params.user_group,
@@ -64,7 +64,7 @@ def webhcat():
       webhcat_site[prop_name] = webhcat_site[prop_name].replace("_HOST", params.hostname)
 
   XmlConfig("webhcat-site.xml",
-            conf_dir=params.config_dir,
+            conf_dir=params.webhcat_conf_dir,
             configurations=webhcat_site,
             configuration_attributes=params.config['configurationAttributes']['webhcat-site'],
             owner=params.webhcat_user,
@@ -91,7 +91,7 @@ def webhcat():
   )
   
 
-  File(format("{config_dir}/webhcat-env.sh"),
+  File(format("{webhcat_conf_dir}/webhcat-env.sh"),
        owner=params.webhcat_user,
        group=params.user_group,
        content=InlineTemplate(params.webhcat_env_sh_template)
@@ -104,22 +104,22 @@ def webhcat():
 
   log4j_webhcat_filename = 'webhcat-log4j.properties'
   if (params.log4j_webhcat_props != None):
-    File(format("{config_dir}/{log4j_webhcat_filename}"),
+    File(format("{webhcat_conf_dir}/{log4j_webhcat_filename}"),
          mode=0644,
          group=params.user_group,
          owner=params.webhcat_user,
          content=InlineTemplate(params.log4j_webhcat_props)
     )
-  elif (os.path.exists("{config_dir}/{log4j_webhcat_filename}.template")):
-    File(format("{config_dir}/{log4j_webhcat_filename}"),
+  elif (os.path.exists("{webhcat_conf_dir}/{log4j_webhcat_filename}.template")):
+    File(format("{webhcat_conf_dir}/{log4j_webhcat_filename}"),
          mode=0644,
          group=params.user_group,
          owner=params.webhcat_user,
-         content=StaticFile(format("{config_dir}/{log4j_webhcat_filename}.template"))
+         content=StaticFile(format("{webhcat_conf_dir}/{log4j_webhcat_filename}.template"))
     )
 
   # Generate atlas-application.properties.xml file
   if params.enable_atlas_hook:
     # WebHCat uses a different config dir than the rest of the daemons in Hive.
-    atlas_hook_filepath = os.path.join(params.config_dir, params.atlas_hook_filename)
+    atlas_hook_filepath = os.path.join(params.webhcat_conf_dir, params.atlas_hook_filename)
     setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group)
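
One pre-existing quirk survives the rename: the elif branch above hands os.path.exists() the literal string "{webhcat_conf_dir}/{log4j_webhcat_filename}.template" without wrapping it in format(), so the .template fallback can never trigger. A sketch of the presumably intended check:

    import os

    webhcat_conf_dir = "/etc/hive-webhcat/conf"
    log4j_webhcat_filename = "webhcat-log4j.properties"

    # Formatting the path first appears to be the intent; the committed
    # literal "{webhcat_conf_dir}/..." never exists on disk.
    template = "%s/%s.template" % (webhcat_conf_dir, log4j_webhcat_filename)
    if os.path.exists(template):
        pass  # ship the packaged template as webhcat-log4j.properties
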
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat_service.py
index cd6b2c6f4e..d933a6c10b 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/scripts/webhcat_service.py
@@ -40,7 +40,7 @@ def webhcat_service(action='start', upgrade_type=None):
     no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
     try:
       Execute(daemon_cmd,
-              environment = { 'HIVE_HOME': params.hive_home_dir },
+              environment = { 'HIVE_HOME': params.hive_home },
               user=params.webhcat_user,
               not_if=no_op_test)
     except:
@@ -91,4 +91,4 @@ def graceful_stop(cmd):
   import params
   daemon_cmd = format('{cmd} stop')
 
-  Execute(daemon_cmd, environment = { 'HIVE_HOME': params.hive_home_dir }, user = params.webhcat_user)
+  Execute(daemon_cmd, environment = { 'HIVE_HOME': params.hive_home }, user = params.webhcat_user)
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/templates/startHiveserver2.sh.j2 b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/templates/startHiveserver2.sh.j2
index 96ffbfcab5..91ca5b3469 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/templates/startHiveserver2.sh.j2
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/HIVE/package/templates/startHiveserver2.sh.j2
@@ -19,5 +19,5 @@
 #
 #
 
-HIVE_CONF_DIR=$4 TEZ_CONF_DIR=$5 {{hive_bin}}/hiveserver2 > $1 2> $2 &
+HIVE_CONF_DIR=$4 TEZ_CONF_DIR=$5 {{hive_bin_dir}}/hiveserver2 > $1 2> $2 &
 echo $!|cat>$3
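
For reference, hive_service.py (earlier in this diff) invokes the rendered script with positional arguments: $1 stdout log, $2 stderr log, $3 pid file, $4 hive conf dir, $5 tez conf dir. A sketch of the command it assembles (paths illustrative):

    start_hiveserver2_path = "/var/lib/ambari-agent/tmp/start_hiveserver2_script"
    hive_log_dir = "/var/log/hive"
    pid_file = "/var/run/hive/hive-server.pid"
    hive_conf_dir, tez_conf_dir = "/etc/hive/conf", "/etc/tez/conf"

    cmd = "%s %s/hive-server2.out %s/hive-server2.err %s %s %s" % (
        start_hiveserver2_path, hive_log_dir, hive_log_dir,
        pid_file, hive_conf_dir, tez_conf_dir)
    print(cmd)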

