Posted to commits@ambari.apache.org by ha...@apache.org on 2017/11/21 15:05:25 UTC

ambari git commit: AMBARI-22488 RU: Restarting HiveServer2 on RU failed (dgrinenko)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.6 eecd8513a -> a49be4af8


AMBARI-22488 RU: Restarting HiveServer2 on RU failed (dgrinenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a49be4af
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a49be4af
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a49be4af

Branch: refs/heads/branch-2.6
Commit: a49be4af86484a55816cf8bf3aba06d24e176335
Parents: eecd851
Author: Dmytro Grinenko <ha...@apache.org>
Authored: Tue Nov 21 16:59:17 2017 +0200
Committer: Dmytro Grinenko <ha...@apache.org>
Committed: Tue Nov 21 16:59:17 2017 +0200

----------------------------------------------------------------------
 .../libraries/functions/copy_tarball.py         | 60 ++++++++++----------
 1 file changed, 29 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a49be4af/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index b05c97c..d1c295d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -190,6 +190,7 @@ def get_sysprep_skip_copy_tarballs_hdfs():
     sysprep_skip_copy_tarballs_hdfs = default("/configurations/cluster-env/sysprep_skip_copy_tarballs_hdfs", False)
   return sysprep_skip_copy_tarballs_hdfs
 
+
 def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None):
   """
   For a given tarball name, get the source and destination paths to use.
@@ -197,50 +198,47 @@ def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_so
  :param use_upgrading_version_during_upgrade: If True and a stack upgrade is in progress, resolve paths against the version being upgraded to.
   :param custom_source_file: If specified, use this source path instead of the default one from the map.
   :param custom_dest_file: If specified, use this destination path instead of the default one from the map.
-  :return: A tuple of (success status, source path, destination path, optional preparation function which is invoked to setup the tarball)
+  :return: A tuple of success status, source path, destination path, optional preparation function which is invoked to setup the tarball
   """
   stack_name = Script.get_stack_name()
 
-  if not stack_name:
-    Logger.error("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name)))
-    return False, None, None
+  try:
+    if not stack_name:
+      raise ValueError("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name)))
 
-  if name is None or name.lower() not in TARBALL_MAP:
-    Logger.error("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(stack_name)))
-    return False, None, None
+    if name is None or name.lower() not in TARBALL_MAP:
+      raise ValueError("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(stack_name)))
 
-  service = TARBALL_MAP[name.lower()]['service']
+    service = TARBALL_MAP[name.lower()]
+    service_name = service['service']
+    stack_version = get_current_version(service=service_name, use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade)
+    stack_root = Script.get_stack_root()
 
-  stack_version = get_current_version(service=service, use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade)
-  if not stack_version:
-    Logger.error("Cannot copy {0} tarball to HDFS because stack version could be be determined.".format(str(name)))
-    return False, None, None
+    if not stack_version or not stack_root:
+      raise ValueError("Cannot copy {0} tarball to HDFS because stack version could be be determined.".format(str(name)))
 
-  stack_root = Script.get_stack_root()
-  if not stack_root:
-    Logger.error("Cannot copy {0} tarball to HDFS because stack root could be be determined.".format(str(name)))
-    return False, None, None
+    source_file, dest_file = service['dirs']
 
-  (source_file, dest_file) = TARBALL_MAP[name.lower()]['dirs']
+    if custom_source_file is not None:
+      source_file = custom_source_file
 
-  if custom_source_file is not None:
-    source_file = custom_source_file
+    if custom_dest_file is not None:
+      dest_file = custom_dest_file
 
-  if custom_dest_file is not None:
-    dest_file = custom_dest_file
+    source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower())
+    dest_file = dest_file.replace(STACK_NAME_PATTERN, stack_name.lower())
 
-  source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower())
-  dest_file = dest_file.replace(STACK_NAME_PATTERN, stack_name.lower())
+    source_file = source_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
+    dest_file = dest_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
 
-  source_file = source_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
-  dest_file = dest_file.replace(STACK_ROOT_PATTERN, stack_root.lower())
+    source_file = source_file.replace(STACK_VERSION_PATTERN, stack_version)
+    dest_file = dest_file.replace(STACK_VERSION_PATTERN, stack_version)
 
-  source_file = source_file.replace(STACK_VERSION_PATTERN, stack_version)
-  dest_file = dest_file.replace(STACK_VERSION_PATTERN, stack_version)
+    prepare_function = service['prepare_function'] if "prepare_function" in service else None
 
-  prepare_function = None
-  if "prepare_function" in TARBALL_MAP[name.lower()]:
-    prepare_function = TARBALL_MAP[name.lower()]['prepare_function']
+  except ValueError as e:
+    Logger.error(str(e))
+    return False, None, None, None
 
   return True, source_file, dest_file, prepare_function
 
@@ -339,7 +337,7 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
   import params
 
   Logger.info("Called copy_to_hdfs tarball: {0}".format(name))
-  (success, source_file, dest_file, prepare_function) = get_tarball_paths(name, use_upgrading_version_during_upgrade,
+  success, source_file, dest_file, prepare_function = get_tarball_paths(name, use_upgrading_version_during_upgrade,
                                                                           custom_source_file, custom_dest_file)
 
   if not success:
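
----------------------------------------------------------------------

As a rough illustration of the contract after this patch: get_tarball_paths() now returns a
four-element tuple on both the success and the error path, so callers can unpack it
unconditionally and branch on the success flag. The sketch below is only an assumption of how
such a caller might look; the tarball name "tez" is an assumed TARBALL_MAP key, and the imports
only resolve inside an Ambari agent environment.

  from resource_management.core.logger import Logger
  from resource_management.libraries.functions.copy_tarball import get_tarball_paths

  # The function returns (success, source_file, dest_file, prepare_function) in every
  # case, so this unpacking no longer fails on the error path.
  success, source_file, dest_file, prepare_function = get_tarball_paths("tez")

  if not success:
      # Error details were already logged inside get_tarball_paths().
      Logger.info("Tarball paths could not be resolved; skipping HDFS copy")
  else:
      Logger.info("Would copy {0} to {1}".format(source_file, dest_file))
      if prepare_function is not None:
          # Optional hook registered in TARBALL_MAP; in the real flow it is invoked
          # by copy_to_hdfs before the upload. Its signature is not exercised here.
          Logger.info("A preparation step is registered for this tarball")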