Posted to commits@ambari.apache.org by ab...@apache.org on 2015/04/15 14:44:36 UTC
[3/3] ambari git commit: AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services
AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7a68f8e4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7a68f8e4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7a68f8e4
Branch: refs/heads/trunk
Commit: 7a68f8e49bafbc34a316c0a9fe9046ab3e5c2ffd
Parents: 3160218
Author: Artem Baranchuk <ab...@hortonworks.com>
Authored: Wed Apr 15 14:56:34 2015 +0300
Committer: Artem Baranchuk <ab...@hortonworks.com>
Committed: Wed Apr 15 15:43:11 2015 +0300
----------------------------------------------------------------------
.../HIVE/0.12.0.2.0/package/scripts/hcat.py | 15 +
.../0.12.0.2.0/package/scripts/hcat_client.py | 18 +-
.../package/scripts/hcat_service_check.py | 11 +
.../HIVE/0.12.0.2.0/package/scripts/hive.py | 47 +-
.../0.12.0.2.0/package/scripts/hive_client.py | 38 +-
.../package/scripts/hive_metastore.py | 41 +-
.../0.12.0.2.0/package/scripts/hive_server.py | 35 +-
.../0.12.0.2.0/package/scripts/hive_service.py | 16 +
.../HIVE/0.12.0.2.0/package/scripts/params.py | 397 +----
.../0.12.0.2.0/package/scripts/params_linux.py | 414 ++++++
.../package/scripts/params_windows.py | 54 +
.../0.12.0.2.0/package/scripts/service_check.py | 22 +-
.../0.12.0.2.0/package/scripts/status_params.py | 55 +-
.../HIVE/0.12.0.2.0/package/scripts/webhcat.py | 12 +-
.../package/scripts/webhcat_server.py | 37 +-
.../package/scripts/webhcat_service.py | 10 +
.../package/scripts/webhcat_service_check.py | 11 +
.../HIVE/etc/hive-schema-0.12.0.mysql.sql | 777 ----------
.../HIVE/etc/hive-schema-0.12.0.oracle.sql | 717 ---------
.../HIVE/etc/hive-schema-0.12.0.postgres.sql | 1405 ------------------
.../HDPWIN/2.1/services/HIVE/metainfo.xml | 4 +
.../HIVE/package/scripts/hcat_client.py | 40 -
.../HIVE/package/scripts/hcat_service_check.py | 25 -
.../2.1/services/HIVE/package/scripts/hive.py | 61 -
.../HIVE/package/scripts/hive_client.py | 41 -
.../HIVE/package/scripts/hive_metastore.py | 53 -
.../HIVE/package/scripts/hive_server.py | 52 -
.../HIVE/package/scripts/mysql_server.py | 46 -
.../2.1/services/HIVE/package/scripts/params.py | 55 -
.../HIVE/package/scripts/service_check.py | 39 -
.../HIVE/package/scripts/service_mapping.py | 23 -
.../services/HIVE/package/scripts/webhcat.py | 30 -
.../HIVE/package/scripts/webhcat_server.py | 48 -
.../package/scripts/webhcat_service_check.py | 27 -
34 files changed, 750 insertions(+), 3926 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
index 31c1673..1f7893d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
@@ -20,8 +20,23 @@ limitations under the License.
from resource_management import *
import sys
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hcat():
+ import params
+
+ XmlConfig("hive-site.xml",
+ conf_dir = params.hive_conf_dir,
+ configurations = params.config['configurations']['hive-site'],
+ owner=params.hive_user,
+ configuration_attributes=params.config['configuration_attributes']['hive-site']
+ )
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hcat():
import params
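
The decorator pair above registers two same-named hcat() functions and resolves the right one at call time based on the agent's OS family. A minimal, self-contained sketch of that dispatch idea (a simplified illustration only; the real logic lives in ambari_commons.os_family_impl):

import sys

_IMPLS = {}  # (function name, os family) -> concrete implementation

def os_family_func_impl(os_family):
    # Register the decorated function as the implementation for one OS family.
    def decorator(func):
        _IMPLS[(func.__name__, os_family)] = func
        def dispatch(*args, **kwargs):
            family = "winsrv" if sys.platform.startswith("win") else "default"
            impl = _IMPLS.get((func.__name__, family)) or _IMPLS[(func.__name__, "default")]
            return impl(*args, **kwargs)
        return dispatch
    return decorator

@os_family_func_impl(os_family="winsrv")
def hcat():
    return "windows implementation"

@os_family_func_impl(os_family="default")
def hcat():
    return "default implementation"

print(hcat())  # resolves to the variant matching the running OS family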
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
index 811cac6..79096e4 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
@@ -21,12 +21,11 @@ limitations under the License.
import sys
from resource_management import *
from hcat import hcat
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
-class HCatClient(Script):
-
- def get_stack_to_component(self):
- return {"HDP": "hadoop-client"}
+class HCatClient(Script):
def install(self, env):
import params
self.install_packages(env, exclude_packages=params.hive_exclude_packages)
@@ -41,5 +40,16 @@ class HCatClient(Script):
raise ClientComponentHasNoStatus()
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HCatClientWindows(HCatClient):
+ pass
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HCatClientDefault(HCatClient):
+ def get_stack_to_component(self):
+ return {"HDP": "hadoop-client"}
+
+
if __name__ == "__main__":
HCatClient().execute()
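
hcat_client.py uses the class-level analogue of the same mechanism: @OsFamilyImpl registers OS-specific subclasses of a shared base, so the unchanged HCatClient().execute() call still instantiates the platform-appropriate variant. A rough sketch of how such registration-plus-instantiation can work (illustrative only, not the ambari_commons implementation):

import sys

def os_family_impl(os_family):
    # Record the decorated class as its base's variant for one OS family.
    def decorator(cls):
        base = cls.__bases__[0]
        if "_variants" not in base.__dict__:
            base._variants = {}
        base._variants[os_family] = cls
        return cls
    return decorator

class Script(object):
    def __new__(cls, *args, **kwargs):
        family = "winsrv" if sys.platform.startswith("win") else "default"
        variants = cls.__dict__.get("_variants", {})
        target = variants.get(family) or variants.get("default") or cls
        return super(Script, cls).__new__(target)

class HCatClient(Script):
    def install(self):
        print("shared install logic")

@os_family_impl("winsrv")
class HCatClientWindows(HCatClient):
    pass

@os_family_impl("default")
class HCatClientDefault(HCatClient):
    def get_stack_to_component(self):
        return {"HDP": "hadoop-client"}

print(type(HCatClient()).__name__)  # HCatClientDefault off Windows, HCatClientWindows on it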
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
index fd4c6ca..e69d420 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
@@ -20,7 +20,18 @@ limitations under the License.
from resource_management import *
from resource_management.libraries.functions import get_unique_id_and_date
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hcat_service_check():
+ import params
+ smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+ service = "HCatalog"
+ Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hcat_service_check():
import params
unique = get_unique_id_and_date()
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 07148d7..a352797 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -19,10 +19,54 @@ limitations under the License.
"""
from resource_management import *
+from resource_management.libraries import functions
import sys
import os
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hive(name=None):
+ import params
+ XmlConfig("hive-site.xml",
+ conf_dir = params.hive_conf_dir,
+ configurations = params.config['configurations']['hive-site'],
+ owner=params.hive_user,
+ configuration_attributes=params.config['configuration_attributes']['hive-site']
+ )
+ if name in ["hiveserver2","metastore"]:
+ Execute(format("cmd /c hadoop fs -mkdir -p {hive_warehouse_dir}"), logoutput=True, user=params.hadoop_user)
+
+ if name == 'metastore':
+ if params.init_metastore_schema:
+ check_schema_created_cmd = format('cmd /c "{hive_bin}\\hive.cmd --service schematool -info '
+ '-dbType {hive_metastore_db_type} '
+ '-userName {hive_metastore_user_name} '
+ '-passWord {hive_metastore_user_passwd!p}'
+ '&set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%"', #cmd "feature", propagate the process exit code manually
+ hive_bin=params.hive_bin,
+ hive_metastore_db_type=params.hive_metastore_db_type,
+ hive_metastore_user_name=params.hive_metastore_user_name,
+ hive_metastore_user_passwd=params.hive_metastore_user_passwd)
+ try:
+ Execute(check_schema_created_cmd)
+ except Fail:
+ create_schema_cmd = format('cmd /c {hive_bin}\\hive.cmd --service schematool -initSchema '
+ '-dbType {hive_metastore_db_type} '
+ '-userName {hive_metastore_user_name} '
+ '-passWord {hive_metastore_user_passwd!p}',
+ hive_bin=params.hive_bin,
+ hive_metastore_db_type=params.hive_metastore_db_type,
+ hive_metastore_user_name=params.hive_metastore_user_name,
+ hive_metastore_user_passwd=params.hive_metastore_user_passwd)
+ Execute(create_schema_cmd,
+ user = params.hive_user,
+ logoutput=True
+ )
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hive(name=None):
import params
@@ -179,7 +223,6 @@ def fill_conf_dir(component_conf_dir):
content=StaticFile(format("{component_conf_dir}/{log4j_filename}.template"))
)
-
def crt_directory(name):
import params
@@ -190,7 +233,6 @@ def crt_directory(name):
group=params.user_group,
mode=0755)
-
def crt_file(name):
import params
@@ -199,7 +241,6 @@ def crt_file(name):
group=params.user_group
)
-
def jdbc_connector():
import params
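
The Windows hive() above probes for an existing metastore schema with `schematool -info` and only runs `-initSchema` when that probe fails; the trailing `set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%` exists because cmd /c can otherwise swallow the exit code of a quoted command. The same check-then-create flow, sketched with subprocess (paths and credentials below are illustrative):

import subprocess

def schematool_cmd(hive_cmd, action, db_type, user, password):
    return [hive_cmd, "--service", "schematool", action,
            "-dbType", db_type, "-userName", user, "-passWord", password]

def ensure_metastore_schema(hive_cmd, db_type, user, password):
    # Probe first; a non-zero exit means the schema is absent or unreadable.
    probe = subprocess.call(schematool_cmd(hive_cmd, "-info", db_type, user, password))
    if probe != 0:
        subprocess.check_call(schematool_cmd(hive_cmd, "-initSchema", db_type, user, password))

# ensure_metastore_schema(r"C:\hdp\hive\bin\hive.cmd", "mssql", "hive", "secret")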
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
index f8306da..b390483 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -19,34 +19,42 @@ limitations under the License.
"""
import sys
from resource_management import *
-
from hive import hive
+from ambari_commons.os_family_impl import OsFamilyImpl
+from ambari_commons import OSConst
-class HiveClient(Script):
-
- def get_stack_to_component(self):
- return {"HDP": "hadoop-client"}
-
- def pre_rolling_restart(self, env):
- import params
- env.set_params(params)
-
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
- Execute(format("hdp-select set hadoop-client {version}"))
+class HiveClient(Script):
def install(self, env):
import params
self.install_packages(env, exclude_packages=params.hive_exclude_packages)
self.configure(env)
+ def status(self, env):
+ raise ClientComponentHasNoStatus()
+
def configure(self, env):
import params
env.set_params(params)
-
hive(name='client')
- def status(self, env):
- raise ClientComponentHasNoStatus()
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveClientWindows(HiveClient):
+ pass
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveClientDefault(HiveClient):
+ def get_stack_to_component(self):
+ return {"HDP": "hadoop-client"}
+
+ def pre_rolling_restart(self, env):
+ import params
+ env.set_params(params)
+ if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ Execute(format("hdp-select set hadoop-client {version}"))
+
if __name__ == "__main__":
HiveClient().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index 8753133..feb00eb 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -25,51 +25,51 @@ from resource_management.libraries.functions.security_commons import build_expec
FILE_TYPE_XML
from hive import hive
from hive_service import hive_service
+from ambari_commons.os_family_impl import OsFamilyImpl
+from ambari_commons import OSConst
class HiveMetastore(Script):
-
- def get_stack_to_component(self):
- return {"HDP": "hive-metastore"}
-
def install(self, env):
import params
-
self.install_packages(env, exclude_packages = params.hive_exclude_packages)
-
- def configure(self, env):
+ def start(self, env, rolling_restart=False):
import params
-
env.set_params(params)
+ self.configure(env) # FOR SECURITY
+ hive_service('metastore', action='start')
- hive(name = 'metastore')
-
-
- def start(self, env, rolling_restart = False):
+ def stop(self, env, rolling_restart=False):
import params
-
env.set_params(params)
- self.configure(env) # FOR SECURITY
- hive_service('metastore', action = 'start')
-
+ hive_service('metastore', action='stop')
- def stop(self, env, rolling_restart = False):
+ def configure(self, env):
import params
-
env.set_params(params)
- hive_service('metastore', action = 'stop' )
+ hive(name = 'metastore')
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveMetastoreWindows(HiveMetastore):
def status(self, env):
import status_params
+ check_windows_service_status(status_params.hive_metastore_win_service_name)
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveMetastoreDefault(HiveMetastore):
+ def get_stack_to_component(self):
+ return {"HDP": "hive-metastore"}
+
+ def status(self, env):
+ import status_params
env.set_params(status_params)
pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
# Recursively check all existing gmetad pid files
check_process_status(pid_file)
-
def pre_rolling_restart(self, env):
Logger.info("Executing Metastore Rolling Upgrade pre-restart")
import params
@@ -78,7 +78,6 @@ class HiveMetastore(Script):
if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
Execute(format("hdp-select set hive-metastore {version}"))
-
def security_status(self, env):
import status_params
env.set_params(status_params)
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 0e03a22..ac32db1 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -27,23 +27,44 @@ from resource_management.libraries.functions.security_commons import build_expec
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
from setup_ranger_hive import setup_ranger_hive
+from ambari_commons.os_family_impl import OsFamilyImpl
+from ambari_commons import OSConst
-class HiveServer(Script):
-
- def get_stack_to_component(self):
- return {"HDP": "hive-server2"}
+class HiveServer(Script):
def install(self, env):
import params
self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-
def configure(self, env):
import params
env.set_params(params)
hive(name='hiveserver2')
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveServerWindows(HiveServer):
+ def start(self, env):
+ import params
+ env.set_params(params)
+ self.configure(env) # FOR SECURITY
+ hive_service('hiveserver2', action='start')
+
+ def stop(self, env):
+ import params
+ env.set_params(params)
+ hive_service('hiveserver2', action='stop')
+
+ def status(self, env):
+ import status_params
+ check_windows_service_status(status_params.hive_server_win_service_name)
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveServerDefault(HiveServer):
+ def get_stack_to_component(self):
+ return {"HDP": "hive-server2"}
+
def start(self, env, rolling_restart=False):
import params
env.set_params(params)
@@ -55,7 +76,6 @@ class HiveServer(Script):
hive_service( 'hiveserver2', action = 'start',
rolling_restart=rolling_restart )
-
def stop(self, env, rolling_restart=False):
import params
env.set_params(params)
@@ -65,7 +85,6 @@ class HiveServer(Script):
else:
hive_service( 'hiveserver2', action = 'stop' )
-
def status(self, env):
import status_params
env.set_params(status_params)
@@ -74,7 +93,6 @@ class HiveServer(Script):
# Recursively check all existing gmetad pid files
check_process_status(pid_file)
-
def pre_rolling_restart(self, env):
Logger.info("Executing HiveServer2 Rolling Upgrade pre-restart")
import params
@@ -85,7 +103,6 @@ class HiveServer(Script):
copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
-
def security_status(self, env):
import status_params
env.set_params(status_params)
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
index 55ff6c9..f7266f1 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
@@ -23,7 +23,23 @@ import sys
import os
import time
from resource_management.core import shell
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hive_service(name, action='start', rolling_restart=False):
+ import params
+ if name == 'metastore':
+ if action == 'start' or action == 'stop':
+ Service(params.hive_metastore_win_service_name, action=action)
+
+ if name == 'hiveserver2':
+ if action == 'start' or action == 'stop':
+ Service(params.hive_server_win_service_name, action=action)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hive_service(name, action='start', rolling_restart=False):
import params
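
On Windows, hive_service() reduces to forwarding start/stop to the registered Windows services; the Service resource does the actual service-control work. A rough standalone equivalent using the built-in `net` command (a sketch only; the service names are illustrative):

import subprocess

def win_service(name, action):
    # Start or stop a named Windows service, as Service(name, action=...) would.
    if action not in ("start", "stop"):
        raise ValueError("unsupported action: %r" % action)
    subprocess.check_call(["net", action, name])

# win_service("metastore", "start")
# win_service("hiveserver2", "stop")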
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index e5f9679..a10c1d4 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -17,398 +17,9 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
+from ambari_commons import OSCheck
-from ambari_commons.constants import AMBARI_SUDO_BINARY
-from ambari_commons.os_check import OSCheck
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-import status_params
-import os
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-sudo = AMBARI_SUDO_BINARY
-
-stack_name = default("/hostLevelParams/stack_name", None)
-
-# node hostname
-hostname = config["hostname"]
-
-# This is expected to be of the form #.#.#.#
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
-version = default("/commandParams/version", None)
-
-# Hadoop params
-# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
- # start out with client libraries
- hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
- hadoop_home = '/usr/hdp/current/hadoop-client'
- hive_bin = '/usr/hdp/current/hive-client/bin'
- hive_lib = '/usr/hdp/current/hive-client/lib'
-
- # if this is a server action, then use the server binaries; smoke tests
- # use the client binaries
- command_role = default("/role", "")
- server_role_dir_mapping = { 'HIVE_SERVER' : 'hive-server2',
- 'HIVE_METASTORE' : 'hive-metastore' }
-
- if command_role in server_role_dir_mapping:
- hive_server_root = server_role_dir_mapping[command_role]
- hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
- hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')
-
- # there are no client versions of these, use server versions directly
- hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
- webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
-
- hive_specific_configs_supported = True
-else:
- hadoop_bin_dir = "/usr/bin"
- hadoop_home = '/usr'
- hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
- hive_bin = '/usr/lib/hive/bin'
- hive_lib = '/usr/lib/hive/lib/'
- pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
- hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
- sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
-
- if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
- hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
- webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
- # for newer versions
- else:
- hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
- webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-
- hive_specific_configs_supported = False
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-hive_conf_dir_prefix = "/etc/hive"
-hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
-hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
-hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
-limits_conf_dir = "/etc/security/limits.d"
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
- hcat_conf_dir = '/etc/hcatalog/conf'
- config_dir = '/etc/hcatalog/conf'
-# for newer versions
-else:
- hcat_conf_dir = '/etc/hive-hcatalog/conf'
- config_dir = '/etc/hive-webhcat/conf'
-
-execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
-hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
-hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
-
-webhcat_conf_dir = status_params.webhcat_conf_dir
-hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
-hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
-#HACK Temporarily use dbType=azuredb while invoking schematool
-if hive_metastore_db_type == "mssql":
- hive_metastore_db_type = "azuredb"
-
-#users
-hive_user = config['configurations']['hive-env']['hive_user']
-#JDBC driver jar name
-hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
-if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
- jdbc_jar_name = "sqljdbc4.jar"
- jdbc_symlink_name = "mssql-jdbc-driver.jar"
-elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
- jdbc_jar_name = "mysql-connector-java.jar"
- jdbc_symlink_name = "mysql-jdbc-driver.jar"
-elif hive_jdbc_driver == "org.postgresql.Driver":
- jdbc_jar_name = "postgresql-jdbc.jar"
- jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
- jdbc_jar_name = "ojdbc.jar"
- jdbc_symlink_name = "oracle-jdbc-driver.jar"
-
-check_db_connection_jar_name = "DBConnectionVerification.jar"
-check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver","com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
-downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
-prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
-templeton_port = config['configurations']['webhcat-site']['templeton.port']
-
-
-#common
-hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
-hive_metastore_host = hive_metastore_hosts[0]
-hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
-hive_var_lib = '/var/lib/hive'
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
-hive_server_hosts = config['clusterHostInfo']['hive_server_host']
-hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
-if hive_transport_mode.lower() == "http":
- hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
-else:
- hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
-hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
-hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
-hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
-
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
-smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-
-hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
-
-#hive_env
-hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
-hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
-hive_pid_dir = status_params.hive_pid_dir
-hive_pid = status_params.hive_pid
-#Default conf dir for client
-hive_conf_dirs_list = [hive_client_conf_dir]
-
-if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
- hive_conf_dirs_list.append(hive_server_conf_dir)
-
-if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
- hive_config_dir = hive_server_conf_dir
-else:
- hive_config_dir = hive_client_conf_dir
-
-#hive-site
-hive_database_name = config['configurations']['hive-env']['hive_database_name']
-hive_database = config['configurations']['hive-env']['hive_database']
-
-#Starting hiveserver2
-start_hiveserver2_script = 'startHiveserver2.sh.j2'
-
-##Starting metastore
-start_metastore_script = 'startMetastore.sh'
-hive_metastore_pid = status_params.hive_metastore_pid
-java_share_dir = '/usr/share/java'
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
-
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
-artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
-
-target = format("{hive_lib}/{jdbc_jar_name}")
-
-jdk_location = config['hostLevelParams']['jdk_location']
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-
-start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
-start_metastore_path = format("{tmp_dir}/start_metastore_script")
-
-hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
-hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
-java64_home = config['hostLevelParams']['java_home']
-
-##### MYSQL
-
-db_name = config['configurations']['hive-env']['hive_database_name']
-mysql_group = 'mysql'
-mysql_host = config['clusterHostInfo']['hive_mysql_host']
-
-mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
-mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
-
-######## Metastore Schema
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
- init_metastore_schema = False
-else:
- init_metastore_schema = True
-
-########## HCAT
-
-hcat_dbroot = hcat_lib
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-
-hcat_pid_dir = status_params.hcat_pid_dir
-hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-hcat_env_sh_template = config['configurations']['hcat-env']['content']
-
-#hive-log4j.properties.template
-if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
- log4j_props = config['configurations']['hive-log4j']['content']
-else:
- log4j_props = None
-
-#webhcat-log4j.properties.template
-if (('webhcat-log4j' in config['configurations']) and ('content' in config['configurations']['webhcat-log4j'])):
- log4j_webhcat_props = config['configurations']['webhcat-log4j']['content']
-else:
- log4j_webhcat_props = None
-
-#hive-exec-log4j.properties.template
-if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
- log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
+if OSCheck.is_windows_family():
+ from params_windows import *
else:
- log4j_exec_props = None
-
-daemon_name = status_params.daemon_name
-process_name = status_params.process_name
-hive_env_sh_template = config['configurations']['hive-env']['content']
-
-hive_hdfs_user_dir = format("/user/{hive_user}")
-hive_hdfs_user_mode = 0700
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
-#for create_hdfs_directory
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST", hostname)
-
-# Tez-related properties
-tez_user = config['configurations']['tez-env']['tez_user']
-
-# Tez jars
-tez_local_api_jars = '/usr/lib/tez/tez*.jar'
-tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
-app_dir_files = {tez_local_api_jars:None}
-
-# Tez libraries
-tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
-
-if OSCheck.is_ubuntu_family():
- mysql_configname = '/etc/mysql/my.cnf'
-else:
- mysql_configname = '/etc/my.cnf'
-
-mysql_user = 'mysql'
-
-# Hive security
-hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
-
-mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
-hive_use_existing_db = hive_database.startswith('Existing')
-hive_exclude_packages = []
-
-# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
-# trying to install mysql-connector-java upon them can cause packages to conflict.
-if hive_use_existing_db:
- hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
-else:
- if 'role' in config and config['role'] != "MYSQL_SERVER":
- hive_exclude_packages = ['mysql', 'mysql-server']
- if os.path.exists(mysql_jdbc_driver_jar):
- hive_exclude_packages.append('mysql-connector-java')
-
-########################################################
-########### WebHCat related params #####################
-########################################################
-
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.hcat_pid_dir
-
-webhcat_pid_file = status_params.webhcat_pid_file
-
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-#for create_hdfs_directory
-security_param = "true" if security_enabled else "false"
-
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
- HdfsDirectory,
- conf_dir = hadoop_conf_dir,
- hdfs_user = hdfs_user,
- security_enabled = security_enabled,
- keytab = hdfs_user_keytab,
- kinit_path_local = kinit_path_local,
- bin_dir = hadoop_bin_dir
-)
-
-# ranger host
-ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
-has_ranger_admin = not len(ranger_admin_hosts) == 0
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
- # setting flag value for ranger hive plugin
- enable_ranger_hive = False
- ranger_plugin_enable = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
- if ranger_plugin_enable.lower() == 'yes':
- enable_ranger_hive = True
- elif ranger_plugin_enable.lower() == 'no':
- enable_ranger_hive = False
-
-#ranger hive properties
-policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
-sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
-xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
-xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
-xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
-xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
-xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
-repo_name = str(config['clusterName']) + '_hive'
-db_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
-hdfs_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
-hdfs_dest_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
-hdfs_buffer_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
-hdfs_archive_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
-hdfs_dest_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
-hdfs_dest_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
-hdfs_dest_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
-hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
-hdfs_buffer_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
-hdfs_buffer_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
-hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
-hdfs_archive_max_file_count = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
-ssl_keystore_file = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
-ssl_keystore_password = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
-ssl_truststore_file = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
-ssl_truststore_password = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
-grant_revoke = default("/configurations/ranger-hive-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE","true")
-
-jdbc_driver_class_name = default("/configurations/ranger-hive-plugin-properties/jdbc.driverClassName","")
-common_name_for_certificate = default("/configurations/ranger-hive-plugin-properties/common.name.for.certificate", "-")
-
-repo_config_username = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hive")
-repo_config_password = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hive")
-
-admin_uname = default("/configurations/ranger-env/admin_username", "admin")
-admin_password = default("/configurations/ranger-env/admin_password", "admin")
-admin_uname_password = format("{admin_uname}:{admin_password}")
-
-ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
-ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
-policy_user = default("/configurations/ranger-hive-plugin-properties/policy_user", "ambari-qa")
-
-#For curl command in ranger plugin to get db connector
-if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
- ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
- ranger_jdbc_jar_name = "mysql-connector-java.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
- ranger_jdbc_jar_name = "ojdbc6.jar"
- ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"
-
-ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
-
-ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
-ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
-
-if security_enabled:
- hive_principal = hive_server_principal.replace('_HOST',hostname.lower())
+ from params_linux import *
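
After this change params.py is a thin router: every consumer keeps writing `import params`, and the star import pulls in the platform-appropriate attribute set at import time. The same routing pattern with only the standard library, for illustration (the module names are assumed to sit next to the router):

import platform

if platform.system() == "Windows":
    from params_windows import *   # Windows-only settings
else:
    from params_linux import *     # the former params.py body, moved verbatim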
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
new file mode 100644
index 0000000..e5f9679
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -0,0 +1,414 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+from ambari_commons.os_check import OSCheck
+from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
+from resource_management import *
+import status_params
+import os
+
+# server configurations
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+sudo = AMBARI_SUDO_BINARY
+
+stack_name = default("/hostLevelParams/stack_name", None)
+
+# node hostname
+hostname = config["hostname"]
+
+# This is expected to be of the form #.#.#.#
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
+# Hadoop params
+# TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
+ # start out with client libraries
+ hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+ hadoop_home = '/usr/hdp/current/hadoop-client'
+ hive_bin = '/usr/hdp/current/hive-client/bin'
+ hive_lib = '/usr/hdp/current/hive-client/lib'
+
+ # if this is a server action, then use the server binaries; smoke tests
+ # use the client binaries
+ command_role = default("/role", "")
+ server_role_dir_mapping = { 'HIVE_SERVER' : 'hive-server2',
+ 'HIVE_METASTORE' : 'hive-metastore' }
+
+ if command_role in server_role_dir_mapping:
+ hive_server_root = server_role_dir_mapping[command_role]
+ hive_bin = format('/usr/hdp/current/{hive_server_root}/bin')
+ hive_lib = format('/usr/hdp/current/{hive_server_root}/lib')
+
+ # there are no client versions of these, use server versions directly
+ hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
+ webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
+
+ hive_specific_configs_supported = True
+else:
+ hadoop_bin_dir = "/usr/bin"
+ hadoop_home = '/usr'
+ hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
+ hive_bin = '/usr/lib/hive/bin'
+ hive_lib = '/usr/lib/hive/lib/'
+ pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
+ hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
+ sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
+
+ if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+ hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+ webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+ # for newer versions
+ else:
+ hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+ webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+
+ hive_specific_configs_supported = False
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hive_conf_dir_prefix = "/etc/hive"
+hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
+hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
+hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
+limits_conf_dir = "/etc/security/limits.d"
+
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+ hcat_conf_dir = '/etc/hcatalog/conf'
+ config_dir = '/etc/hcatalog/conf'
+# for newer versions
+else:
+ hcat_conf_dir = '/etc/hive-hcatalog/conf'
+ config_dir = '/etc/hive-webhcat/conf'
+
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
+hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
+hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
+
+webhcat_conf_dir = status_params.webhcat_conf_dir
+hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
+hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
+#HACK Temporarily use dbType=azuredb while invoking schematool
+if hive_metastore_db_type == "mssql":
+ hive_metastore_db_type = "azuredb"
+
+#users
+hive_user = config['configurations']['hive-env']['hive_user']
+#JDBC driver jar name
+hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
+if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
+ jdbc_jar_name = "sqljdbc4.jar"
+ jdbc_symlink_name = "mssql-jdbc-driver.jar"
+elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
+ jdbc_jar_name = "mysql-connector-java.jar"
+ jdbc_symlink_name = "mysql-jdbc-driver.jar"
+elif hive_jdbc_driver == "org.postgresql.Driver":
+ jdbc_jar_name = "postgresql-jdbc.jar"
+ jdbc_symlink_name = "postgres-jdbc-driver.jar"
+elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+ jdbc_jar_name = "ojdbc.jar"
+ jdbc_symlink_name = "oracle-jdbc-driver.jar"
+
+check_db_connection_jar_name = "DBConnectionVerification.jar"
+check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
+hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver","com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
+downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
+prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
+templeton_port = config['configurations']['webhcat-site']['templeton.port']
+
+
+#common
+hive_metastore_hosts = config['clusterHostInfo']['hive_metastore_host']
+hive_metastore_host = hive_metastore_hosts[0]
+hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
+hive_var_lib = '/var/lib/hive'
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
+hive_server_hosts = config['clusterHostInfo']['hive_server_host']
+hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
+if hive_transport_mode.lower() == "http":
+ hive_server_port = config['configurations']['hive-site']['hive.server2.thrift.http.port']
+else:
+ hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
+hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
+hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
+hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
+
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
+smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+
+fs_root = config['configurations']['core-site']['fs.defaultFS']
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+
+hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
+
+#hive_env
+hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
+hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
+hive_pid_dir = status_params.hive_pid_dir
+hive_pid = status_params.hive_pid
+#Default conf dir for client
+hive_conf_dirs_list = [hive_client_conf_dir]
+
+if hostname in hive_metastore_hosts or hostname in hive_server_hosts:
+ hive_conf_dirs_list.append(hive_server_conf_dir)
+
+if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
+ hive_config_dir = hive_server_conf_dir
+else:
+ hive_config_dir = hive_client_conf_dir
+
+#hive-site
+hive_database_name = config['configurations']['hive-env']['hive_database_name']
+hive_database = config['configurations']['hive-env']['hive_database']
+
+#Starting hiveserver2
+start_hiveserver2_script = 'startHiveserver2.sh.j2'
+
+##Starting metastore
+start_metastore_script = 'startMetastore.sh'
+hive_metastore_pid = status_params.hive_metastore_pid
+java_share_dir = '/usr/share/java'
+driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+
+target = format("{hive_lib}/{jdbc_jar_name}")
+
+jdk_location = config['hostLevelParams']['jdk_location']
+driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
+
+start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
+start_metastore_path = format("{tmp_dir}/start_metastore_script")
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
+java64_home = config['hostLevelParams']['java_home']
+
+##### MYSQL
+
+db_name = config['configurations']['hive-env']['hive_database_name']
+mysql_group = 'mysql'
+mysql_host = config['clusterHostInfo']['hive_mysql_host']
+
+mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
+mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
+
+######## Metastore Schema
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
+ init_metastore_schema = False
+else:
+ init_metastore_schema = True
+
+########## HCAT
+
+hcat_dbroot = hcat_lib
+
+hcat_user = config['configurations']['hive-env']['hcat_user']
+webhcat_user = config['configurations']['hive-env']['webhcat_user']
+
+hcat_pid_dir = status_params.hcat_pid_dir
+hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+hcat_env_sh_template = config['configurations']['hcat-env']['content']
+
+#hive-log4j.properties.template
+if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
+ log4j_props = config['configurations']['hive-log4j']['content']
+else:
+ log4j_props = None
+
+#webhcat-log4j.properties.template
+if (('webhcat-log4j' in config['configurations']) and ('content' in config['configurations']['webhcat-log4j'])):
+ log4j_webhcat_props = config['configurations']['webhcat-log4j']['content']
+else:
+ log4j_webhcat_props = None
+
+#hive-exec-log4j.properties.template
+if (('hive-exec-log4j' in config['configurations']) and ('content' in config['configurations']['hive-exec-log4j'])):
+ log4j_exec_props = config['configurations']['hive-exec-log4j']['content']
+else:
+ log4j_exec_props = None
+
+daemon_name = status_params.daemon_name
+process_name = status_params.process_name
+hive_env_sh_template = config['configurations']['hive-env']['content']
+
+hive_hdfs_user_dir = format("/user/{hive_user}")
+hive_hdfs_user_mode = 0700
+hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
+#for create_hdfs_directory
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', 'missing_principal').replace("_HOST", hostname)
+
+# Tez-related properties
+tez_user = config['configurations']['tez-env']['tez_user']
+
+# Tez jars
+tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
+
+# Tez libraries
+tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
+
+if OSCheck.is_ubuntu_family():
+ mysql_configname = '/etc/mysql/my.cnf'
+else:
+ mysql_configname = '/etc/my.cnf'
+
+mysql_user = 'mysql'
+
+# Hive security
+hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
+
+mysql_jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
+hive_use_existing_db = hive_database.startswith('Existing')
+hive_exclude_packages = []
+
+# There are other packages that contain /usr/share/java/mysql-connector-java.jar (like libmysql-java),
+# trying to install mysql-connector-java upon them can cause packages to conflict.
+if hive_use_existing_db:
+ hive_exclude_packages = ['mysql-connector-java', 'mysql', 'mysql-server']
+else:
+ if 'role' in config and config['role'] != "MYSQL_SERVER":
+ hive_exclude_packages = ['mysql', 'mysql-server']
+ if os.path.exists(mysql_jdbc_driver_jar):
+ hive_exclude_packages.append('mysql-connector-java')
+
+########################################################
+########### WebHCat related params #####################
+########################################################
+
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.hcat_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#for create_hdfs_directory
+security_param = "true" if security_enabled else "false"
+
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+ HdfsDirectory,
+ conf_dir = hadoop_conf_dir,
+ hdfs_user = hdfs_user,
+ security_enabled = security_enabled,
+ keytab = hdfs_user_keytab,
+ kinit_path_local = kinit_path_local,
+ bin_dir = hadoop_bin_dir
+)
+
+# ranger host
+ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
+has_ranger_admin = not len(ranger_admin_hosts) == 0
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
+ # setting flag value for ranger hive plugin
+ enable_ranger_hive = False
+ ranger_plugin_enable = default("/configurations/ranger-hive-plugin-properties/ranger-hive-plugin-enabled", "no")
+ if ranger_plugin_enable.lower() == 'yes':
+ enable_ranger_hive = True
+ elif ranger_plugin_enable.lower() == 'no':
+ enable_ranger_hive = False
+
+#ranger hive properties
+policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
+sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
+xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
+xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
+xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
+xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
+xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
+repo_name = str(config['clusterName']) + '_hive'
+db_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
+hdfs_enabled = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
+hdfs_dest_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
+hdfs_buffer_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
+hdfs_archive_dir = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
+hdfs_dest_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
+hdfs_dest_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
+hdfs_dest_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
+hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
+hdfs_buffer_file = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
+hdfs_buffer_flush_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
+hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
+hdfs_archive_max_file_count = default("/configurations/ranger-hive-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
+ssl_keystore_file = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
+ssl_keystore_password = default("/configurations/ranger-hive-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
+ssl_truststore_file = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
+ssl_truststore_password = default("/configurations/ranger-hive-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
+grant_revoke = default("/configurations/ranger-hive-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE","true")
+
+jdbc_driver_class_name = default("/configurations/ranger-hive-plugin-properties/jdbc.driverClassName","")
+common_name_for_certificate = default("/configurations/ranger-hive-plugin-properties/common.name.for.certificate", "-")
+
+repo_config_username = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_USERNAME", "hive")
+repo_config_password = default("/configurations/ranger-hive-plugin-properties/REPOSITORY_CONFIG_PASSWORD", "hive")
+
+admin_uname = default("/configurations/ranger-env/admin_username", "admin")
+admin_password = default("/configurations/ranger-env/admin_password", "admin")
+admin_uname_password = format("{admin_uname}:{admin_password}")
+
+ambari_ranger_admin = default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
+ambari_ranger_password = default("/configurations/ranger-env/ranger_admin_password", "ambari123")
+policy_user = default("/configurations/ranger-hive-plugin-properties/policy_user", "ambari-qa")
+
+# JDBC connector that the Ranger plugin downloads via curl; only the MYSQL
+# and ORACLE flavors are handled here.
+if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
+ ranger_jdbc_jar_name = "mysql-connector-java.jar"
+ ranger_jdbc_symlink_name = "mysql-jdbc-driver.jar"
+elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
+ ranger_jdbc_jar_name = "ojdbc6.jar"
+ ranger_jdbc_symlink_name = "oracle-jdbc-driver.jar"
+
+ranger_downloaded_custom_connector = format("{tmp_dir}/{ranger_jdbc_jar_name}")
+
+ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
+ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
+
+if security_enabled:
+ hive_principal = hive_server_principal.replace('_HOST', hostname.lower())
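
The functools.partial block above is worth a standalone illustration. A minimal sketch of the same pattern, assuming nothing from Ambari; make_dir, MakeDir, and all bound values are hypothetical stand-ins for HdfsDirectory and its pre-bound arguments:

import functools

def make_dir(path, owner, conf_dir, hdfs_user, security_enabled):
  # Stand-in for HdfsDirectory: prints instead of creating anything.
  print("mkdir %s as %s (conf=%s, hdfs_user=%s, secure=%s)" %
        (path, owner, conf_dir, hdfs_user, security_enabled))

# Pre-bind the arguments every call shares...
MakeDir = functools.partial(make_dir,
                            conf_dir="/etc/hadoop/conf",
                            hdfs_user="hdfs",
                            security_enabled=False)

# ...so call sites only pass what varies per directory.
MakeDir("/user/hcat", owner="hcat")
MakeDir("/user/webhcat", owner="webhcat")
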
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
new file mode 100644
index 0000000..a9395a6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+
+from resource_management import *
+from status_params import *
+
+# server configurations
+config = Script.get_config()
+
+# This is expected to be of the form #.#.#.#
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+
+hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+hive_conf_dir = os.environ["HIVE_CONF_DIR"]
+hive_home = os.environ["HIVE_HOME"]
+hive_lib_dir = os.environ["HIVE_LIB_DIR"]
+hive_log_dir = os.environ["HIVE_LOG_DIR"]
+hive_opts = os.environ["HIVE_OPTS"]
+hcat_home = os.environ["HCAT_HOME"]
+hcat_config_dir = os.environ["WEBHCAT_CONF_DIR"]
+
+hive_env_sh_template = config['configurations']['hive-env']['content']
+hive_warehouse_dir = config['configurations']['hive-site']['hive.metastore.warehouse.dir']
+hive_user = "hadoop"
+hadoop_user = "hadoop"
+hcat_user = "hadoop"
+
+hive_bin = os.path.join(hive_home, "bin")
+hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
+hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
+hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
+
+hive_exclude_packages = []
+
+######## Metastore Schema
+init_metastore_schema = config['configurations']['hive-site']['datanucleus.autoCreateSchema']
\ No newline at end of file
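
params_windows.py reads every path straight from os.environ, so a missing variable surfaces as a bare KeyError. A hedged sketch of a fail-fast alternative; require_env is a hypothetical helper, not part of Ambari:

import os

def require_env(name):
  # Hypothetical helper: raise a readable error when a variable is unset.
  try:
    return os.environ[name]
  except KeyError:
    raise RuntimeError("%s must be set before running the HIVE scripts" % name)

hive_home = require_env("HIVE_HOME")
hdp_root = os.path.abspath(os.path.join(require_env("HADOOP_HOME"), ".."))
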
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
index 9f822a9..03af5b6 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
@@ -21,11 +21,31 @@ limitations under the License.
from resource_management import *
import socket
import sys
-
from hcat_service_check import hcat_service_check
from webhcat_service_check import webhcat_service_check
+import os
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
+
class HiveServiceCheck(Script):
+ pass
+
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class HiveServiceCheckWindows(HiveServiceCheck):
+ def service_check(self, env):
+ import params
+ env.set_params(params)
+ smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+ service = "HIVE"
+ Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hive_user, logoutput=True)
+
+ hcat_service_check()
+ webhcat_service_check()
+
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class HiveServiceCheckDefault(HiveServiceCheck):
def service_check(self, env):
import params
env.set_params(params)
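
The class-level dispatch used in service_check.py generalizes to any Script subclass. A minimal sketch of the pattern, using only imports already present in this diff; MyCheck and its subclasses are hypothetical names:

from resource_management import Script
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl

class MyCheck(Script):
  pass

@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class MyCheckWindows(MyCheck):
  def service_check(self, env):
    # Windows-specific check would go here.
    pass

@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class MyCheckDefault(MyCheck):
  def service_check(self, env):
    # Linux/default check would go here.
    pass

if __name__ == "__main__":
  # The decorator registry resolves MyCheck() to the subclass matching the
  # agent's OS family, so the entry point stays OS-agnostic.
  MyCheck().execute()
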
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index 504f8d6..5dcdbf5 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -23,29 +23,34 @@ from ambari_commons.os_check import OSCheck
config = Script.get_config()
-hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
-hive_pid = 'hive-server.pid'
-
-hive_metastore_pid = 'hive.pid'
-
-hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
-webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
-
-process_name = 'mysqld'
-if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
- daemon_name = 'mysql'
+if OSCheck.is_windows_family():
+ hive_metastore_win_service_name = "metastore"
+ hive_client_win_service_name = "hwi"
+ hive_server_win_service_name = "hiveserver2"
+ webhcat_server_win_service_name = "templeton"
else:
- daemon_name = 'mysqld'
-
-
-# Security related/required params
-hostname = config['hostname']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-hadoop_conf_dir = "/etc/hadoop/conf"
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-tmp_dir = Script.get_tmp_dir()
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hive_user = config['configurations']['hive-env']['hive_user']
-hive_conf_dir = "/etc/hive/conf"
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-webhcat_conf_dir = '/etc/hive-webhcat/conf'
+ hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
+ hive_pid = 'hive-server.pid'
+
+ hive_metastore_pid = 'hive.pid'
+
+ hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
+ webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
+
+ process_name = 'mysqld'
+ if OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
+ daemon_name = 'mysql'
+ else:
+ daemon_name = 'mysqld'
+
+ # Security related/required params
+ hostname = config['hostname']
+ security_enabled = config['configurations']['cluster-env']['security_enabled']
+ hadoop_conf_dir = "/etc/hadoop/conf"
+ kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+ tmp_dir = Script.get_tmp_dir()
+ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+ hive_user = config['configurations']['hive-env']['hive_user']
+ hive_conf_dir = "/etc/hive/conf"
+ webhcat_user = config['configurations']['hive-env']['webhcat_user']
+ webhcat_conf_dir = '/etc/hive-webhcat/conf'
\ No newline at end of file
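
A consumer of status_params has to pick the right names per platform, mirroring the status methods elsewhere in this commit. A hedged sketch, assuming the resource_management wildcard import provides both status helpers, as the surrounding scripts do:

from resource_management import *
from ambari_commons.os_check import OSCheck
import status_params

def webhcat_status():
  if OSCheck.is_windows_family():
    check_windows_service_status(status_params.webhcat_server_win_service_name)
  else:
    check_process_status(status_params.webhcat_pid_file)
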
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index 4f1d564..d396f11 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -21,13 +21,23 @@ Ambari Agent
import sys
import os.path
import glob
-
from resource_management import *
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions.version import compare_versions
from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def webhcat():
+ import params
+ XmlConfig("webhcat-site.xml",
+ conf_dir=params.hcat_config_dir,
+ configurations=params.config['configurations']['webhcat-site']
+ )
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def webhcat():
import params
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index 2b470a9..0373a0d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -24,42 +24,49 @@ from resource_management.libraries.functions.security_commons import build_expec
FILE_TYPE_XML
from webhcat import webhcat
from webhcat_service import webhcat_service
+from ambari_commons import OSConst
+from ambari_commons.os_family_impl import OsFamilyImpl
-class WebHCatServer(Script):
-
- def get_stack_to_component(self):
- return {"HDP": "hive-webhcat"}
+class WebHCatServer(Script):
def install(self, env):
import params
self.install_packages(env, exclude_packages=params.hive_exclude_packages)
-
- def configure(self, env):
- import params
- env.set_params(params)
- webhcat()
-
-
def start(self, env, rolling_restart=False):
import params
env.set_params(params)
self.configure(env) # FOR SECURITY
- webhcat_service(action = 'start')
-
+ webhcat_service(action='start')
def stop(self, env, rolling_restart=False):
import params
env.set_params(params)
+ webhcat_service(action='stop')
- webhcat_service(action = 'stop')
+ def configure(self, env):
+ import params
+ env.set_params(params)
+ webhcat()
+
+
+@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
+class WebHCatServerWindows(WebHCatServer):
def status(self, env):
import status_params
env.set_params(status_params)
- check_process_status(status_params.webhcat_pid_file)
+ check_windows_service_status(status_params.webhcat_server_win_service_name)
+
+@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
+class WebHCatServerDefault(WebHCatServer):
+ def get_stack_to_component(self):
+ return {"HDP": "hive-webhcat"}
+
+ def status(self, env):
+ import status_params
+ env.set_params(status_params)
+ check_process_status(status_params.webhcat_pid_file)
def pre_rolling_restart(self, env):
Logger.info("Executing WebHCat Rolling Upgrade pre-restart")
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
index d1251df..7fb82da 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
@@ -19,7 +19,17 @@ Ambari Agent
"""
from resource_management import *
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def webhcat_service(action='start'):
+ import params
+ if action == 'start' or action == 'stop':
+ Service(params.webhcat_server_win_service_name, action=action)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def webhcat_service(action='start'):
import params
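
webhcat_service.py shows the function-level variant of the same dispatch: the function is defined once per OS family under the same name. A minimal standalone sketch; demo_service is a hypothetical name:

from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl

@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def demo_service(action='start'):
  # Windows branch: would drive the registered Windows service.
  print("windows %s" % action)

@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def demo_service(action='start'):
  # Default branch: would run the daemon's start/stop command.
  print("default %s" % action)

# Callers invoke demo_service(...) normally; the decorator selects the
# implementation matching the host's OS family.
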
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
index bea1f56..9b59f30 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
@@ -19,7 +19,18 @@ limitations under the License.
"""
from resource_management import *
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+import os
+from ambari_commons import OSConst
+
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def webhcat_service_check():
+ import params
+ smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+ service = "WEBHCAT"
+ Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)
+
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def webhcat_service_check():
import params
File(format("{tmp_dir}/templetonSmoke.sh"),