You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jl...@apache.org on 2016/04/08 11:22:15 UTC
ambari git commit: AMBARI-15577: Stack Featurize HIVE service (Juanjo
Marron via jluniya)
Repository: ambari
Updated Branches:
refs/heads/trunk 5772ceb05 -> b18ecf015
AMBARI-15577: Stack Featurize HIVE service (Juanjo Marron via jluniya)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b18ecf01
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b18ecf01
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b18ecf01
Branch: refs/heads/trunk
Commit: b18ecf015ac4651566cbb76fe2338f6bd5101f54
Parents: 5772ceb
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Fri Apr 8 02:22:00 2016 -0700
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Fri Apr 8 02:22:00 2016 -0700
----------------------------------------------------------------------
.../libraries/functions/constants.py | 8 +-
.../libraries/functions/stack_features.py | 33 ++++++++-
.../package/alerts/alert_hive_metastore.py | 8 +-
.../0.12.0.2.0/package/scripts/hcat_client.py | 13 ++--
.../package/scripts/hcat_service_check.py | 4 +-
.../HIVE/0.12.0.2.0/package/scripts/hive.py | 13 ++--
.../0.12.0.2.0/package/scripts/hive_client.py | 7 +-
.../package/scripts/hive_metastore.py | 22 +++---
.../0.12.0.2.0/package/scripts/hive_server.py | 8 +-
.../package/scripts/hive_server_interactive.py | 9 +--
.../package/scripts/hive_server_upgrade.py | 13 ++--
.../0.12.0.2.0/package/scripts/hive_service.py | 10 ++-
.../0.12.0.2.0/package/scripts/params_linux.py | 77 ++++++++++----------
.../0.12.0.2.0/package/scripts/status_params.py | 42 ++++++-----
.../HIVE/0.12.0.2.0/package/scripts/webhcat.py | 11 ++-
.../package/scripts/webhcat_server.py | 7 +-
.../package/scripts/webhcat_service.py | 4 +-
.../HDP/2.0.6/properties/stack_features.json | 33 ++++++++-
.../stacks/2.0.6/HIVE/test_hive_server.py | 14 +---
19 files changed, 209 insertions(+), 127 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 055a576..86dbc48 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -69,4 +69,10 @@ class StackFeature:
OOZIE_ADMIN_USER = "oozie_admin_user"
OOZIE_CREATE_HIVE_TEZ_CONFIGS = "oozie_create_hive_tez_configs"
OOZIE_SETUP_SHARED_LIB = "oozie_setup_shared_lib"
- OOZIE_HOST_KERBEROS = "oozie_host_kerberos"
\ No newline at end of file
+ OOZIE_HOST_KERBEROS = "oozie_host_kerberos"
+ HIVE_METASTORE_UPGRADE_SCHEMA = "hive_metastore_upgrade_schema"
+ HIVE_SERVER_INTERACTIVE = "hive_server_interactive"
+ HIVE_WEBHCAT_SPECIFIC_CONFIGS = "hive_webhcat_specific_configs"
+ HIVE_PURGE_TABLE = "hive_purge_table"
+ HIVE_SERVER2_KERBERIZED_ENV = "hive_server2_kerberized_env"
+ HIVE_ENV_HEAPSIZE = "hive_env_heapsize"
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
index 8d31fdc..7201c36 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
@@ -181,7 +181,38 @@ _DEFAULT_STACK_FEATURES = {
"description": "Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)",
"min_version": "2.0.0.0",
"max_version": "2.2.0.0"
- }
+ },
+ {
+ "name": "hive_metastore_upgrade_schema",
+ "description": "Hive metastore upgrade schema support (AMBARI-11176)",
+ "min_version": "2.3.0.0"
+ },
+ {
+ "name": "hive_server_interactive",
+ "description": "Hive server interactive support (AMBARI-15573)",
+ "min_version": "2.5.0.0"
+ },
+ {
+ "name": "hive_webhcat_specific_configs",
+ "description": "Hive webhcat specific configurations support (AMBARI-12364)",
+ "min_version": "2.3.0.0"
+ },
+ {
+ "name": "hive_purge_table",
+ "description": "Hive purge table support (AMBARI-12260)",
+ "min_version": "2.3.0.0"
+ },
+ {
+ "name": "hive_server2_kerberized_env",
+ "description": "Hive server2 working on kerberized environment (AMBARI-13749)",
+ "min_version": "2.2.3.0",
+ "max_version": "2.2.5.0"
+ },
+ {
+ "name": "hive_env_heapsize",
+ "description": "Hive heapsize property defined in hive-env (AMBARI-12801)",
+ "min_version": "2.2.0.0"
+ }
]
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
index 42485b6..a556410 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
@@ -31,6 +31,10 @@ from resource_management.core.resources import Execute
from ambari_commons.os_check import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+import params
+
+stack_root = params.stack_root
+
OK_MESSAGE = "Metastore OK - Hive command took {0:.3f}s"
CRITICAL_MESSAGE = "Metastore on {0} failed ({1})"
SECURITY_ENABLED_KEY = '{{cluster-env/security_enabled}}'
@@ -54,10 +58,10 @@ SMOKEUSER_PRINCIPAL_DEFAULT = 'ambari-qa@EXAMPLE.COM'
SMOKEUSER_SCRIPT_PARAM_KEY = 'default.smoke.user'
SMOKEUSER_DEFAULT = 'ambari-qa'
-HIVE_CONF_DIR = '/usr/hdp/current/hive-metastore/conf/conf.server'
+HIVE_CONF_DIR = format("{stack_root}/current/hive-metastore/conf/conf.server")
HIVE_CONF_DIR_LEGACY = '/etc/hive/conf.server'
-HIVE_BIN_DIR = '/usr/hdp/current/hive-metastore/bin'
+HIVE_BIN_DIR = format("{stack_root}/current/hive-metastore/bin")
HIVE_BIN_DIR_LEGACY = '/usr/lib/hive/bin'
CHECK_COMMAND_TIMEOUT_KEY = 'check.command.timeout'
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
index 941b70c..6c41fc2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
@@ -24,7 +24,8 @@ from ambari_commons.os_family_impl import OsFamilyImpl
from resource_management.core.logger import Logger
from resource_management.core.exceptions import ClientComponentHasNoStatus
from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.script.script import Script
@@ -54,12 +55,13 @@ class HCatClientDefault(HCatClient):
# HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
# update after daemons, this ensures that the hcat directories are correct on hosts
# which do not include the WebHCat daemon
- return {"HDP": "hive-webhcat"}
+ import params
+ return {params.stack_name: "hive-webhcat"}
def pre_upgrade_restart(self, env, upgrade_type=None):
"""
- Execute <stack-selector-tool> before reconfiguring this client to the new HDP version.
+ Execute <stack-selector-tool> before reconfiguring this client to the new stack version.
:param env:
:param upgrade_type:
@@ -70,9 +72,8 @@ class HCatClientDefault(HCatClient):
import params
env.set_params(params)
- # this function should not execute if the version can't be determined or
- # is not at least HDP 2.2.0.0
- if not params.version or compare_versions(params.version, "2.2", format=True) < 0:
+ # this function should not execute if the stack version does not support rolling upgrade
+ if not (params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version)):
return
# HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
index 4153821..07b4095 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
@@ -53,8 +53,8 @@ def hcat_service_check():
prepare_cmd = format("{kinit_cmd}env JAVA_HOME={java64_home} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} prepare {purge_tables}")
exec_path = params.execute_path
- if params.version and params.stack_name:
- upgrade_hive_bin = format("/usr/hdp/{version}/hive/bin")
+ if params.version and params.stack_root:
+ upgrade_hive_bin = format("{stack_root}/{version}/hive/bin")
exec_path = os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + upgrade_hive_bin
Execute(prepare_cmd,
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 3c51d39..7d913e5 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -25,7 +25,8 @@ from urlparse import urlparse
from resource_management.libraries.script.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
-from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.core.resources.service import ServiceConfig
from resource_management.core.resources.system import File, Execute, Directory
from resource_management.core.source import StaticFile, Template, DownloadSource, InlineTemplate
@@ -106,8 +107,8 @@ def hive(name=None):
import params
if name == 'hiveserver2':
- # HDP 2.1.* or lower
- if params.stack_version_formatted_major != "" and compare_versions(params.stack_version_formatted_major, "2.2.0.0") < 0:
+ # copy tarball to HDFS feature not supported
+ if not (params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major)):
params.HdfsResource(params.webhcat_apps_dir,
type="directory",
action="create_on_execute",
@@ -133,13 +134,13 @@ def hive(name=None):
# ****** Begin Copy Tarballs ******
# *********************************
- # HDP 2.2 or higher, copy mapreduce.tar.gz to HDFS
- if params.stack_version_formatted_major != "" and compare_versions(params.stack_version_formatted_major, '2.2') >= 0:
+ # if copy tarball to HDFS feature supported copy mapreduce.tar.gz and tez.tar.gz to HDFS
+ if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
# Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
- # This can use a different source and dest location to account for both HDP 2.1 and 2.2
+ # This can use a different source and dest location to account for different stack versions
copy_to_hdfs("pig",
params.user_group,
params.hdfs_user,
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
index ba2a129..6af0cd3 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -21,6 +21,8 @@ import sys
from resource_management import *
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from hive import hive
from ambari_commons.os_family_impl import OsFamilyImpl
from ambari_commons import OSConst
@@ -48,14 +50,15 @@ class HiveClientWindows(HiveClient):
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HiveClientDefault(HiveClient):
def get_stack_to_component(self):
- return {"HDP": "hadoop-client"}
+ import params
+ return {params.stack_name: "hadoop-client"}
def pre_upgrade_restart(self, env, upgrade_type=None):
Logger.info("Executing Hive client Stack Upgrade pre-restart")
import params
env.set_params(params)
- if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
conf_select.select(params.stack_name, "hive", params.version)
conf_select.select(params.stack_name, "hadoop", params.version)
stack_select.select("hadoop-client", params.version)
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index 7fee2b3..8adff0a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -27,7 +27,8 @@ from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.constants import Direction
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.version import format_stack_version
-from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.security_commons import build_expectations
from resource_management.libraries.functions.security_commons import cached_kinit_executor
from resource_management.libraries.functions.security_commons import get_params_from_filesystem
@@ -41,7 +42,7 @@ from hive_service import hive_service
from ambari_commons.os_family_impl import OsFamilyImpl
from ambari_commons import OSConst
-# the legacy conf.server location in HDP 2.2
+# the legacy conf.server location in previous stack versions
LEGACY_HIVE_SERVER_CONF = "/etc/hive/conf.server"
class HiveMetastore(Script):
@@ -83,7 +84,8 @@ class HiveMetastoreWindows(HiveMetastore):
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HiveMetastoreDefault(HiveMetastore):
def get_stack_to_component(self):
- return {"HDP": "hive-metastore"}
+ import params
+ return {params.stack_name: "hive-metastore"}
def status(self, env):
@@ -102,13 +104,13 @@ class HiveMetastoreDefault(HiveMetastore):
env.set_params(params)
- is_stack_hdp_23 = Script.is_stack_greater_or_equal("2.3")
is_upgrade = params.upgrade_direction == Direction.UPGRADE
- if is_stack_hdp_23 and is_upgrade:
+ if is_upgrade and params.stack_version_formatted_major and \
+ check_stack_feature(StackFeature.HIVE_METASTORE_UPGRADE_SCHEMA, params.stack_version_formatted_major):
self.upgrade_schema(env)
- if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
conf_select.select(params.stack_name, "hive", params.version)
stack_select.select("hive-metastore", params.version)
@@ -194,7 +196,7 @@ class HiveMetastoreDefault(HiveMetastore):
# ensure that the JDBC drive is present for the schema tool; if it's not
# present, then download it first
if params.hive_jdbc_driver in params.hive_jdbc_drivers_list:
- target_directory = format("/usr/hdp/{version}/hive/lib")
+ target_directory = format("{stack_root}/{version}/hive/lib")
# download it if it does not exist
if not os.path.exists(params.source_jdbc_file):
@@ -222,15 +224,15 @@ class HiveMetastoreDefault(HiveMetastore):
File(target_directory_and_filename, mode = 0644)
# build the schema tool command
- binary = format("/usr/hdp/{version}/hive/bin/schematool")
+ binary = format("{stack_root}/{version}/hive/bin/schematool")
- # the conf.server directory changed locations between HDP 2.2 and 2.3
+ # the conf.server directory changed locations between stack versions
# since the configurations have not been written out yet during an upgrade
# we need to choose the original legacy location
schematool_hive_server_conf_dir = params.hive_server_conf_dir
if params.current_version is not None:
current_version = format_stack_version(params.current_version)
- if compare_versions(current_version, "2.3") < 0:
+ if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, current_version)):
schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
env_dict = {
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index f7f1377..9ba8619 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -27,7 +27,8 @@ from resource_management.libraries.functions import format
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.libraries.functions.check_process_status import check_process_status
-from resource_management.libraries.functions.version import compare_versions, format_stack_version
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
@@ -76,7 +77,8 @@ class HiveServerWindows(HiveServer):
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HiveServerDefault(HiveServer):
def get_stack_to_component(self):
- return {"HDP": "hive-server2"}
+ import params
+ return {params.stack_name: "hive-server2"}
def start(self, env, upgrade_type=None):
import params
@@ -117,7 +119,7 @@ class HiveServerDefault(HiveServer):
import params
env.set_params(params)
- if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
conf_select.select(params.stack_name, "hive", params.version)
stack_select.select("hive-server2", params.version)
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index 8e4e6df..055296b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -66,7 +66,8 @@ class HiveServerWindows(HiveServerInteractive):
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HiveServerDefault(HiveServerInteractive):
def get_stack_to_component(self):
- return {"HDP": "hive-server2-hive2"}
+ import params
+ return {params.stack_name: "hive-server2-hive2"}
def start(self, env, upgrade_type=None):
import params
@@ -158,14 +159,10 @@ class HiveServerDefault(HiveServerInteractive):
Logger.info("Starting LLAP")
# TODO, start only if not already running.
-
- # TODO, remove this prop after Hive is parametrized
- stack_home = "/usr/hdp"
-
# TODO : Currently hardcoded the params. Need to read the suggested values from hive2/hive-site.xml.
# TODO, ensure that script works as hive from cmd when not cd'ed in /home/hive
# Needs permission to write to hive home dir.
- cmd = format("{stack_home}/current/hive-server2-hive2/bin/hive --service llap --instances 1 -slider-am-container-mb {slider_am_container_mb} --loglevel INFO")
+ cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llap --instances 1 -slider-am-container-mb {slider_am_container_mb} --loglevel INFO")
run_file_path = None
try:
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index 8045528..478c240 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -25,8 +25,9 @@ from resource_management.core.resources.system import Execute
from resource_management.core import shell
from resource_management.libraries.functions import format
from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.version import format_stack_version
-from resource_management.libraries.functions.version import compare_versions
def post_upgrade_deregister():
@@ -79,15 +80,15 @@ def _get_hive_execute_path(stack_version_formatted):
Returns the exact execute path to use for the given stack-version.
This method does not return the "current" path
:param stack_version_formatted: Exact stack-version to use in the new path
- :return: Hive execute path for the exact hdp stack-version
+ :return: Hive execute path for the exact stack-version
"""
import params
hive_execute_path = params.execute_path
formatted_stack_version = format_stack_version(stack_version_formatted)
- if formatted_stack_version and compare_versions(formatted_stack_version, "2.2") >= 0:
+ if formatted_stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
# hive_bin
- new_hive_bin = format('/usr/hdp/{stack_version_formatted}/hive/bin')
+ new_hive_bin = format('{stack_root}/{stack_version_formatted}/hive/bin')
if (os.pathsep + params.hive_bin) in hive_execute_path:
hive_execute_path = hive_execute_path.replace(os.pathsep + params.hive_bin, os.pathsep + new_hive_bin)
# hadoop_bin_dir
@@ -118,8 +119,8 @@ def _get_current_hiveserver_version():
hive_execute_path = _get_hive_execute_path(source_version)
version_hive_bin = params.hive_bin
formatted_source_version = format_stack_version(source_version)
- if formatted_source_version and compare_versions(formatted_source_version, "2.2") >= 0:
- version_hive_bin = format('/usr/hdp/{source_version}/hive/bin')
+ if formatted_source_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_source_version):
+ version_hive_bin = format('{stack_root}/{source_version}/hive/bin')
command = format('{version_hive_bin}/hive --version')
return_code, output = shell.call(command, user=params.hive_user, path=hive_execute_path)
except Exception, e:
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
index 3aa6536..33e8e47 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
@@ -32,6 +32,8 @@ from resource_management.core.shell import as_user
from resource_management.libraries.functions.hive_check import check_thrift_port_sasl
from resource_management.libraries.functions import get_user_call_output
from resource_management.libraries.functions.show_logs import show_logs
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
@@ -61,8 +63,8 @@ def hive_service(name, action='start', upgrade_type=None):
pid_file = format("{hive_pid_dir}/{hive_pid}")
cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.err {pid_file} {hive_server_conf_dir} {hive_log_dir}")
- if params.security_enabled and params.current_version != None and (params.current_version.startswith("2.2.4") or
- params.current_version.startswith("2.2.3")):
+
+ if params.security_enabled and params.current_version and check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV, params.current_version):
hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; ")
Execute(hive_kinit_cmd, user=params.hive_user)
@@ -83,9 +85,9 @@ def hive_service(name, action='start', upgrade_type=None):
if upgrade_type == UPGRADE_TYPE_ROLLING:
process_id_exists_command = None
- if params.version:
+ if params.version and params.stack_root:
import os
- hadoop_home = format("/usr/hdp/{version}/hadoop")
+ hadoop_home = format("{stack_root}/{version}/hadoop")
hive_bin = os.path.join(params.hive_bin, hive_bin)
Execute(daemon_cmd,
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index f1560ab..c7d57c0 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -31,11 +31,12 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.is_empty import is_empty
-from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.copy_tarball import STACK_VERSION_PATTERN
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.get_port_from_url import get_port_from_url
from resource_management.libraries.functions.expect import expect
from resource_management.libraries import functions
@@ -45,7 +46,9 @@ config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY
+stack_root = status_params.stack_root
stack_name = default("/hostLevelParams/stack_name", None)
+stack_name_uppercase = stack_name.upper()
agent_stack_retry_on_unavailability = config['hostLevelParams']['agent_stack_retry_on_unavailability']
agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int)
@@ -53,9 +56,8 @@ agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int
hostname = config["hostname"]
# This is expected to be of the form #.#.#.#
-stack_version_unformatted = config['hostLevelParams']['stack_version']
-stack_version_formatted_major = format_stack_version(stack_version_unformatted)
-stack_is_hdp21 = Script.is_stack_less_than("2.2")
+stack_version_unformatted = status_params.stack_version_unformatted
+stack_version_formatted_major = status_params.stack_version_formatted_major
# this is not available on INSTALL action because <stack-selector-tool> is not available
stack_version_formatted = functions.get_stack_version('hive-server2')
@@ -86,11 +88,11 @@ hive_interactive_bin = '/usr/lib/hive2/bin'
hive_interactive_lib = '/usr/lib/hive2/lib/'
hive_interactive_var_lib = '/var/lib/hive2'
-# These tar folders were used in HDP 2.1
+# These tar folders were used in previous stack versions
hadoop_streaming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
-hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
-sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
+pig_tar_file = format('/usr/share/{stack_name_uppercase}-webhcat/pig.tar.gz')
+hive_tar_file = format('/usr/share/{stack_name_uppercase}-webhcat/hive.tar.gz')
+sqoop_tar_file = format('/usr/share/{stack_name_uppercase}-webhcat/sqoop*.tar.gz')
hive_specific_configs_supported = False
hive_etc_dir_prefix = "/etc/hive"
@@ -101,7 +103,7 @@ hive_user_nofile_limit = default("/configurations/hive-env/hive_user_nofile_limi
hive_user_nproc_limit = default("/configurations/hive-env/hive_user_nproc_limit", "16000")
# use the directories from status_params as they are already calculated for
-# the correct version of HDP
+# the correct stack version
hadoop_conf_dir = status_params.hadoop_conf_dir
hadoop_bin_dir = status_params.hadoop_bin_dir
webhcat_conf_dir = status_params.webhcat_conf_dir
@@ -117,50 +119,51 @@ config_dir = '/etc/hive-webhcat/conf'
hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-# Starting from HDP2.3 drop should be executed with purge suffix
purge_tables = "false"
-if Script.is_stack_greater_or_equal("2.3"):
+# Starting with the stack version that supports the hive_purge_table feature, drop should be executed with the purge option
+if stack_version_formatted_major and check_stack_feature(StackFeature.HIVE_PURGE_TABLE, stack_version_formatted_major):
purge_tables = 'true'
- # this is NOT a typo. HDP-2.3 configs for hcatalog/webhcat point to a
+if stack_version_formatted_major and check_stack_feature(StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS, stack_version_formatted_major):
+ # this is NOT a typo. Configs for hcatalog/webhcat point to a
# specific directory which is NOT called 'conf'
- hcat_conf_dir = '/usr/hdp/current/hive-webhcat/etc/hcatalog'
- config_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat'
+ hcat_conf_dir = format('{stack_root}/current/hive-webhcat/etc/hcatalog')
+ config_dir = format('{stack_root}/current/hive-webhcat/etc/webhcat')
-if Script.is_stack_greater_or_equal("2.2"):
+if stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted_major):
hive_specific_configs_supported = True
component_directory = status_params.component_directory
component_directory_interactive = status_params.component_directory_interactive
- hadoop_home = '/usr/hdp/current/hadoop-client'
- hive_bin = format('/usr/hdp/current/{component_directory}/bin')
- hive_interactive_bin = format('/usr/hdp/current/{component_directory_interactive}/bin')
- hive_lib = format('/usr/hdp/current/{component_directory}/lib')
- hive_interactive_lib = format('/usr/hdp/current/{component_directory_interactive}/lib')
+ hadoop_home = format('{stack_root}/current/hadoop-client')
+ hive_bin = format('{stack_root}/current/{component_directory}/bin')
+ hive_interactive_bin = format('{stack_root}/current/{component_directory_interactive}/bin')
+ hive_lib = format('{stack_root}/current/{component_directory}/lib')
+ hive_interactive_lib = format('{stack_root}/current/{component_directory_interactive}/lib')
# there are no client versions of these, use server versions directly
- hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
- webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
+ hcat_lib = format('{stack_root}/current/hive-webhcat/share/hcatalog')
+ webhcat_bin_dir = format('{stack_root}/current/hive-webhcat/sbin')
# --- Tarballs ---
# DON'T CHANGE THESE VARIABLE NAMES
# Values don't change from those in copy_tarball.py
- hive_tar_source = "/usr/hdp/{0}/hive/hive.tar.gz".format(STACK_VERSION_PATTERN)
- pig_tar_source = "/usr/hdp/{0}/pig/pig.tar.gz".format(STACK_VERSION_PATTERN)
- hive_tar_dest_file = "/hdp/apps/{0}/hive/hive.tar.gz".format(STACK_VERSION_PATTERN)
- pig_tar_dest_file = "/hdp/apps/{0}/pig/pig.tar.gz".format(STACK_VERSION_PATTERN)
+ hive_tar_source = "{0}/{1}/hive/hive.tar.gz".format(stack_root, STACK_VERSION_PATTERN)
+ pig_tar_source = "{0}/{1}/pig/pig.tar.gz".format(stack_root, STACK_VERSION_PATTERN)
+ hive_tar_dest_file = "/{0}/apps/{1}/hive/hive.tar.gz".format(stack_name, STACK_VERSION_PATTERN)
+ pig_tar_dest_file = "/{0}/apps/{1}/pig/pig.tar.gz".format(stack_name, STACK_VERSION_PATTERN)
- hadoop_streaming_tar_source = "/usr/hdp/{0}/hadoop-mapreduce/hadoop-streaming.jar".format(STACK_VERSION_PATTERN)
- sqoop_tar_source = "/usr/hdp/{0}/sqoop/sqoop.tar.gz".format(STACK_VERSION_PATTERN)
- hadoop_streaming_tar_dest_dir = "/hdp/apps/{0}/mapreduce/".format(STACK_VERSION_PATTERN)
- sqoop_tar_dest_dir = "/hdp/apps/{0}/sqoop/".format(STACK_VERSION_PATTERN)
+ hadoop_streaming_tar_source = "{0}/{1}/hadoop-mapreduce/hadoop-streaming.jar".format(stack_root, STACK_VERSION_PATTERN)
+ sqoop_tar_source = "{0}/{1}/sqoop/sqoop.tar.gz".format(stack_root, STACK_VERSION_PATTERN)
+ hadoop_streaming_tar_dest_dir = "/{0}/apps/{1}/mapreduce/".format(stack_name, STACK_VERSION_PATTERN)
+ sqoop_tar_dest_dir = "/{0}/apps/{1}/sqoop/".format(stack_name, STACK_VERSION_PATTERN)
tarballs_mode = 0444
else:
# --- Tarballs ---
webhcat_apps_dir = "/apps/webhcat"
- # In HDP 2.1, the tarballs were copied from and to different locations.
+ # In previous versions, the tarballs were copied from and to different locations.
# DON'T CHANGE THESE VARIABLE NAMES
hive_tar_source = hive_tar_file
pig_tar_source = pig_tar_file
@@ -221,13 +224,13 @@ downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
target = format("{hive_lib}/{jdbc_jar_name}")
driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
-if Script.is_stack_less_than("2.2"):
+if not (stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted_major)):
source_jdbc_file = target
else:
# normally, the JDBC driver would be referenced by <stack-root>/current/.../foo.jar
# but in RU if <stack-selector-tool> is called and the restart fails, then this means that current pointer
# is now pointing to the upgraded version location; that's bad for the cp command
- source_jdbc_file = format("/usr/hdp/{current_version}/hive/lib/{jdbc_jar_name}")
+ source_jdbc_file = format("{stack_root}/{current_version}/hive/lib/{jdbc_jar_name}")
check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
@@ -326,10 +329,10 @@ start_metastore_path = format("{tmp_dir}/start_metastore_script")
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
- if Script.is_stack_less_than("2.2"):
- hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
- else:
+ if stack_version_formatted_major and check_stack_feature(StackFeature.HIVE_ENV_HEAPSIZE, stack_version_formatted_major):
hive_heapsize = config['configurations']['hive-env']['hive.heapsize']
+ else:
+ hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
else:
hive_heapsize = config['configurations']['hive-env']['hive.client.heapsize']
@@ -425,7 +428,7 @@ atlas_plugin_package = "atlas-metadata*-hive-plugin"
atlas_ubuntu_plugin_package = "atlas-metadata.*-hive-plugin"
if has_atlas:
- atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else '/usr/hdp/current/atlas-server'
+ atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else format('{stack_root}/current/atlas-server')
atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
# client.properties
atlas_client_props = {}
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index d71e061..feca9f1 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -23,6 +23,9 @@ from ambari_commons import OSCheck
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import format
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
@@ -44,6 +47,10 @@ component_directory_interactive = Script.get_component_from_role(SERVER_ROLE_DIR
config = Script.get_config()
+stack_root = Script.get_stack_root()
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+stack_version_formatted_major = format_stack_version(stack_version_unformatted)
+
if OSCheck.is_windows_family():
hive_metastore_win_service_name = "metastore"
hive_client_win_service_name = "hwi"
@@ -82,30 +89,27 @@ else:
hive_conf_dir = "/etc/hive/conf"
hive_client_conf_dir = "/etc/hive/conf"
- # !!! required by ranger to be at this location unless HDP 2.3+
hive_server_conf_dir = "/etc/hive/conf.server"
-
+
hive_server_interactive_conf_dir = "/etc/hive2/conf.server"
+
+ if stack_version_formatted_major and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted_major):
+ webhcat_conf_dir = format("{stack_root}/current/hive-webhcat/conf")
+ hive_conf_dir = format("{stack_root}/current/{component_directory}/conf")
+ hive_client_conf_dir = format("{stack_root}/current/{component_directory}/conf")
+
+ if stack_version_formatted_major and check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version_formatted_major):
+ hive_server_conf_dir = format("{stack_root}/current/{component_directory}/conf/conf.server")
+ hive_conf_dir = hive_server_conf_dir
- # HDP 2.2+
- if Script.is_stack_greater_or_equal("2.2"):
- webhcat_conf_dir = '/usr/hdp/current/hive-webhcat/conf'
- hive_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
- hive_client_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
-
- # HDP 2.3+
- if Script.is_stack_greater_or_equal("2.3"):
- # ranger is only compatible with this location on HDP 2.3+, not HDP 2.2
- hive_server_conf_dir = format("/usr/hdp/current/{component_directory}/conf/conf.server")
-
-
- # this is NOT a typo. HDP-2.3 configs for hcatalog/webhcat point to a
+ if stack_version_formatted_major and check_stack_feature(StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS, stack_version_formatted_major):
+ # this is NOT a typo. Configs for hcatalog/webhcat point to a
# specific directory which is NOT called 'conf'
- webhcat_conf_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat'
- hive_conf_dir = hive_server_conf_dir
+ webhcat_conf_dir = format("{stack_root}/current/hive-webhcat/etc/webhcat")
- if Script.is_stack_greater_or_equal("2.5"):
- hive_server_interactive_conf_dir = format("/usr/hdp/current/{component_directory_interactive}/conf")
+ # if the stack version supports hive server interactive
+ if stack_version_formatted_major and check_stack_feature(StackFeature.HIVE_SERVER_INTERACTIVE, stack_version_formatted_major):
+ hive_server_interactive_conf_dir = format("{stack_root}/current/{component_directory_interactive}/conf")
hive_config_dir = hive_client_conf_dir
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index b95f6f3..3acbc7b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -22,10 +22,12 @@ import sys
import os.path
from resource_management import *
from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
+
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def webhcat():
import params
@@ -90,9 +92,10 @@ def webhcat():
)
# if we're in an upgrade of a secure cluster, make sure hive-site and yarn-site are created
- if Script.is_stack_greater_or_equal("2.3") and params.version:
+ if params.stack_version_formatted_major and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version_formatted_major) and \
+ params.version and params.stack_root:
XmlConfig("hive-site.xml",
- conf_dir = format("/usr/hdp/{version}/hive/conf"),
+ conf_dir = format("{stack_root}/{version}/hive/conf"),
configurations = params.config['configurations']['hive-site'],
configuration_attributes = params.config['configuration_attributes']['hive-site'],
owner = params.hive_user,
@@ -100,7 +103,7 @@ def webhcat():
)
XmlConfig("yarn-site.xml",
- conf_dir = format("/usr/hdp/{version}/hadoop/conf"),
+ conf_dir = format("{stack_root}/{version}/hadoop/conf"),
configurations = params.config['configurations']['yarn-site'],
configuration_attributes = params.config['configuration_attributes']['yarn-site'],
owner = params.yarn_user,
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index b0a876d..482f0be 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -21,6 +21,8 @@ Ambari Agent
from resource_management import *
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
@@ -63,7 +65,8 @@ class WebHCatServerWindows(WebHCatServer):
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class WebHCatServerDefault(WebHCatServer):
def get_stack_to_component(self):
- return {"HDP": "hive-webhcat"}
+ import params
+ return {params.stack_name: "hive-webhcat"}
def status(self, env):
import status_params
@@ -75,7 +78,7 @@ class WebHCatServerDefault(WebHCatServer):
import params
env.set_params(params)
- if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
# webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
conf_select.select(params.stack_name, "hive-hcatalog", params.version)
conf_select.select(params.stack_name, "hadoop", params.version)
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
index 2412c15..7d0a862 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
@@ -44,8 +44,8 @@ def webhcat_service(action='start', upgrade_type=None):
cmd = format('{webhcat_bin_dir}/webhcat_server.sh')
if action == 'start':
- if upgrade_type is not None and params.version:
- environ['HADOOP_HOME'] = format("/usr/hdp/{version}/hadoop")
+ if upgrade_type is not None and params.version and params.stack_root:
+ environ['HADOOP_HOME'] = format("{stack_root}/{version}/hadoop")
daemon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
no_op_test = as_user(format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1'), user=params.webhcat_user)
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index 0388aa2..03b7e01 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -158,6 +158,37 @@
"description": "Oozie in secured clusters uses _HOST in Kerberos principal (AMBARI-9775)",
"min_version": "2.0.0.0",
"max_version": "2.2.0.0"
- }
+ },
+ {
+ "name": "hive_metastore_upgrade_schema",
+ "description": "Hive metastore upgrade schema support (AMBARI-11176)",
+ "min_version": "2.3.0.0"
+ },
+ {
+ "name": "hive_server_interactive",
+ "description": "Hive server interactive support (AMBARI-15573)",
+ "min_version": "2.5.0.0"
+ },
+ {
+ "name": "hive_webhcat_specific_configs",
+ "description": "Hive webhcat specific configurations support (AMBARI-12364)",
+ "min_version": "2.3.0.0"
+ },
+ {
+ "name": "hive_purge_table",
+ "description": "Hive purge table support (AMBARI-12260)",
+ "min_version": "2.3.0.0"
+ },
+ {
+ "name": "hive_server2_kerberized_env",
+ "description": "Hive server2 working on kerberized environment (AMBARI-13749)",
+ "min_version": "2.2.3.0",
+ "max_version": "2.2.5.0"
+ },
+ {
+ "name": "hive_env_heapsize",
+ "description": "Hive heapsize property defined in hive-env (AMBARI-12801)",
+ "min_version": "2.2.0.0"
+ }
]
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/b18ecf01/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 8d0467a..26e3f72 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -44,7 +44,6 @@ class TestHiveServer(RMFTestCase):
Logger.logger = MagicMock()
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_configure_default(self, copy_to_hdfs_mock):
copy_to_hdfs_mock.return_value = True
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
@@ -59,7 +58,6 @@ class TestHiveServer(RMFTestCase):
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
@patch("socket.socket")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_default(self, socket_mock, copy_to_hfds_mock):
copy_to_hfds_mock.return_value = None
s = socket_mock.return_value
@@ -96,7 +94,6 @@ class TestHiveServer(RMFTestCase):
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
@patch("socket.socket")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_default_non_hdfs(self, socket_mock, copy_to_hfds_mock):
copy_to_hfds_mock.return_value = None
s = socket_mock.return_value
@@ -126,7 +123,6 @@ class TestHiveServer(RMFTestCase):
)
self.assertNoMoreResources()
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_default_no_copy(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -157,7 +153,6 @@ class TestHiveServer(RMFTestCase):
self.assertNoMoreResources()
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_default_alt_tmp(self, copy_to_hfds_mock):
copy_to_hfds_mock.return_value = None
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
@@ -189,7 +184,6 @@ class TestHiveServer(RMFTestCase):
self.assertNoMoreResources()
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_default_alt_nn_ha_tmp(self, copy_to_hfds_mock):
copy_to_hfds_mock.return_value = None
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
@@ -220,7 +214,6 @@ class TestHiveServer(RMFTestCase):
)
self.assertNoMoreResources()
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_stop_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -246,7 +239,6 @@ class TestHiveServer(RMFTestCase):
self.assertNoMoreResources()
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_configure_secured(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -261,7 +253,6 @@ class TestHiveServer(RMFTestCase):
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
@patch("hive_service.check_fs_root")
@patch("socket.socket")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_start_secured(self, socket_mock, check_fs_root_mock, copy_to_hfds_mock):
s = socket_mock.return_value
copy_to_hfds_mock.return_value = None
@@ -299,7 +290,6 @@ class TestHiveServer(RMFTestCase):
@patch("socket.socket")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=False))
def test_stop_secured(self, socket_mock):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
classname = "HiveServer",
@@ -735,7 +725,6 @@ class TestHiveServer(RMFTestCase):
self.assert_configure_default()
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=True))
@patch("os.path.exists", new = MagicMock(return_value=True))
@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
def test_stop_during_upgrade(self, copy_to_hdfs_mock):
@@ -756,13 +745,12 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
)
self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.2.1.0-2065'), sudo=True,)
- self.assertResourceCalledByIndex(31, 'Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
+ self.assertResourceCalledByIndex(31, 'Execute', 'hive --config /etc/hive/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
tries=1, user='hive')
@patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
- @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=True))
def test_stop_during_upgrade_with_default_conf_server(self, copy_to_hdfs_mock):
hiveServerVersionOutput = """WARNING: Use "yarn jar" to launch YARN applications.
Hive 1.2.1.2.3.0.0-2434