Posted to commits@ambari.apache.org by mg...@apache.org on 2018/06/13 17:27:00 UTC

[ambari] branch trunk updated: AMBARI-24094 Hive Upgrade in Atlantic

This is an automated email from the ASF dual-hosted git repository.

mgergely pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 8d2336e  AMBARI-24094 Hive Upgrade in Atlantic
8d2336e is described below

commit 8d2336edfb8f94aa69f4d67233523e0d852a109f
Author: Miklos Gergely <mg...@hortonworks.com>
AuthorDate: Fri May 11 10:48:29 2018 +0200

    AMBARI-24094 Hive Upgrade in Atlantic
---
 .../0.12.0.2.0/package/scripts/params_linux.py     |  1 +
 .../HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py | 95 ++++++++++++++++++++++
 2 files changed, 96 insertions(+)

diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 5ce2d0c..d0a9d3d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -244,6 +244,7 @@ else:
   hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
 hive_metastore_user_passwd = unicode(hive_metastore_user_passwd) if not is_empty(hive_metastore_user_passwd) else hive_metastore_user_passwd
 hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
+hive_db_schma_name = config['configurations']['hive-site']['ambari.hive.db.schema.name']
 
 #HACK Temporarily use dbType=azuredb while invoking schematool
 if hive_metastore_db_type == "mssql":
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py
new file mode 100644
index 0000000..9a017f1
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+# Python Imports
+import os
+import shutil
+
+
+# Ambari Commons & Resource Management Imports
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Execute, Directory
+from resource_management.libraries.functions import upgrade_summary
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.script import Script
+
+class HivePreUpgrade(Script):
+
+  def backup_hive_metastore_database_local(self, env):
+    import params
+    env.set_params(params)
+
+    self.__dump_mysql_db(True)
+
+  def backup_hive_metastore_database_external(self, env):
+    import params
+    env.set_params(params)
+
+    is_db_here = params.hostname in params.hive_jdbc_connection_url
+    if params.hive_metastore_db_type == "mysql":
+      self.__dump_mysql_db(is_db_here)
+    elif params.hive_metastore_db_type == "postgres":
+      self.__dump_postgres_db(is_db_here)
+    elif params.hive_metastore_db_type == "oracle":
+      self.__dump_oracle_db(is_db_here)
+    else:
+      raise Fail(format("Unknown db type: {hive_metastore_db_type}. Please create a db backup manually, then click on 'IGNORE AND PROCEED'"))
+
+  def __dump_mysql_db(self, is_db_here):
+    command = format("mysqldump {hive_db_schma_name} > {{dump_file}}")
+    self.__dump_db(command, "mysql", is_db_here)
+
+  def __dump_postgres_db(self, is_db_here):
+    command = format("export PGPASSWORD='{hive_metastore_user_passwd}'; pg_dump -U {hive_metastore_user_name} {{hive_db_schma_name}} > {{dump_file}}")
+    self.__dump_db(command, "postgres", is_db_here)
+
+  def __dump_oracle_db(self, is_db_here):
+    command = format("exp userid={hive_metastore_user_name}/{hive_metastore_user_passwd} full=y file={{dump_file}}")
+    self.__dump_db(command, "oracle", is_db_here)
+
+  def __dump_db(self, command, type, is_db_here):
+    dump_dir = "/etc/hive/dbdump"
+    dump_file = format("{dump_dir}/hive-{stack_version_formatted}-{type}-dump.sql")
+    if is_db_here:
+      if not os.path.exists(dump_dir):
+        Directory(dump_dir)
+      Execute(format(command), user = "root")
+      Logger.info(format("Hive Metastore database backup created at {dump_file}"))
+    else:
+      Logger.warn(format("Hive Metastore is using an external {hive_metastore_db_type} database, the connection url is {hive_jdbc_connection_url}."))
+      Logger.warn(format("Please log in to that host, and create a db backup manually by executing the following command: \"{command}\", then click on 'IGNORE AND PROCEED'"))
+      raise Fail()
+
+  def convert_tables(self, env):
+    import params
+    env.set_params(params)
+    
+    source_version = upgrade_summary.get_source_version(service_name = "HIVE")
+    target_version = upgrade_summary.get_target_version(service_name = "HIVE")
+    
+    source_dir = format("/usr/hdp/{source_version}")
+    target_dir = format("/usr/hdp/{target_version}")
+    
+    classpath = format("{source_dir}/hive2/lib/*:{source_dir}/hadoop/*:{source_dir}/hadoop/lib/*:{source_dir}/hadoop-mapreduce/*:{source_dir}/hadoop-mapreduce/lib/*:{target_dir}/hive/lib/hive-pre-upgrade.jar:{source_dir}/hive/conf")
+    cmd = format("{java64_home}/bin/java -cp {classpath} org.apache.hadoop.hive.upgrade.acid.PreUpgradeTool -execute")
+    Execute(cmd, user = "hive")
+
+if __name__ == "__main__":
+  HivePreUpgrade().execute()
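
A note on the command templates in __dump_mysql_db, __dump_postgres_db and __dump_oracle_db above: the doubled braces (e.g. {{dump_file}}) survive the first format() call as {dump_file} and are only substituted by the later format(command) inside __dump_db. The following is a minimal sketch of that two-pass expansion using plain str.format in place of Ambari's resource_management format() (which resolves names from the calling scope); the schema name and stack version below are made-up placeholders, not values from this commit.

    # Two-pass template expansion, mirroring the pattern in pre_upgrade.py.
    # Plain str.format stands in for Ambari's format(); all values are
    # illustrative placeholders.

    # Pass 1 (as in __dump_mysql_db): fill in the schema name, keep
    # {dump_file} literal by doubling its braces in the template.
    template = "mysqldump {hive_db_schma_name} > {{dump_file}}"
    command = template.format(hive_db_schma_name="hive")
    # command == "mysqldump hive > {dump_file}"

    # Pass 2 (as in __dump_db): fill in the dump file path computed there.
    dump_dir = "/etc/hive/dbdump"
    dump_file = "{0}/hive-2.6.5.0-mysql-dump.sql".format(dump_dir)
    print(command.format(dump_file=dump_file))
    # mysqldump hive > /etc/hive/dbdump/hive-2.6.5.0-mysql-dump.sql

Keeping the schema and credential substitution in the per-database helpers while deferring the file name to __dump_db is what lets a single __dump_db handle all three database types with one dump_file naming scheme.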

-- 
To stop receiving notification emails like this one, please contact
mgergely@apache.org.