Posted to commits@ambari.apache.org by is...@apache.org on 2018/07/27 17:03:43 UTC

[ambari] branch branch-2.7 updated: Revert hive pre-upgrade script commits in PR #1886

This is an automated email from the ASF dual-hosted git repository.

ishanbha pushed a commit to branch branch-2.7
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/branch-2.7 by this push:
     new 3503896  Revert hive pre-upgrade script commits in PR #1886
3503896 is described below

commit 3503896cd57e2a520c350b41e5ec9b62409037a2
Author: Ishan Bhatt <is...@gmail.com>
AuthorDate: Fri Jul 27 09:58:33 2018 -0700

    Revert hive pre-upgrade script commits in PR #1886
---
 .../HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py                | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py
index c9fd147..4db8b8b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/pre_upgrade.py
@@ -21,7 +21,7 @@ limitations under the License.
 import os
 import shutil
 import traceback
-import glob
+
 
 # Ambari Commons & Resource Management Imports
 from resource_management.core.exceptions import Fail
@@ -99,11 +99,7 @@ class HivePreUpgrade(Script):
       hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; ")
       Execute(hive_kinit_cmd, user = params.hive_user)
     
-    classpath = format("{source_dir}/hive2/lib/*:{source_dir}/hadoop/*:{source_dir}/hadoop/lib/*:{source_dir}/hadoop-mapreduce/*:{source_dir}/hadoop-mapreduce/lib/*:{source_dir}/hadoop-hdfs/*:{source_dir}/hadoop-hdfs/lib/*:{source_dir}/hadoop/etc/hadoop/:{target_dir}/hive/lib/hive-pre-upgrade.jar:{source_dir}/hive/conf/conf.server")
-    # hack to avoid derby cp issue we want derby-10.10.2.0.jar to appear first in cp, if its available, note other derby jars are derbyclient-10.11.1.1.jar  derbynet-10.11.1.1.jar
-    derby_jars = glob.glob(source_dir+"/hive2/lib/*derby-*.jar")
-    if len(derby_jars) == 1:
-      classpath = derby_jars[0] + ":" + classpath
+    classpath = format("{source_dir}/hive2/lib/*:{source_dir}/hadoop/*:{source_dir}/hadoop/lib/*:{source_dir}/hadoop-mapreduce/*:{source_dir}/hadoop-mapreduce/lib/*:{target_dir}/hive/lib/hive-pre-upgrade.jar:{source_dir}/hive/conf")
     cmd = format("{java64_home}/bin/java -Djavax.security.auth.useSubjectCredsOnly=false -cp {classpath} org.apache.hadoop.hive.upgrade.acid.PreUpgradeTool -execute")
     Execute(cmd, user = params.hive_user)
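
For reference, the reverted hunk prepended a specific Derby jar to the classpath so that
derby-10.10.2.0.jar would be resolved ahead of derbyclient-*.jar and derbynet-*.jar. A
minimal standalone sketch of that technique follows; the source_dir value and the jar
versions named in the comments are illustrative only (taken from the removed comment),
not the values a live cluster would supply through params.

    import glob
    import os

    # Illustrative path only; the real script derives this from the stack layout.
    source_dir = "/usr/hdp/current"

    # Base classpath, mirroring the wildcard entries the reverted code built with format().
    classpath = ":".join([
        os.path.join(source_dir, "hive2/lib/*"),
        os.path.join(source_dir, "hadoop/*"),
        os.path.join(source_dir, "hadoop/lib/*"),
    ])

    # Prepend the standalone Derby jar (e.g. derby-10.10.2.0.jar) if exactly one match
    # is found. The pattern "*derby-*.jar" requires the literal "derby-", so it does not
    # match derbyclient-10.11.1.1.jar or derbynet-10.11.1.1.jar, which stay later in the
    # classpath behind the wildcard entries.
    derby_jars = glob.glob(os.path.join(source_dir, "hive2/lib/*derby-*.jar"))
    if len(derby_jars) == 1:
        classpath = derby_jars[0] + ":" + classpath

    print(classpath)

The sketch only shows the classpath construction; in the reverted script the resulting
string was passed to the java invocation of PreUpgradeTool via Execute(), as seen in the
diff above.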